Mirror of https://github.com/junegunn/fzf.git (synced 2025-11-08 11:23:47 -05:00)

Compare: 0.16.3...dependabot (2170 commits)

Files changed:

.editorconfig (new file, 20 lines)
@@ -0,0 +1,20 @@
root = true

[*.{sh,bash}]
indent_style = space
indent_size = 2
simplify = true
binary_next_line = false
switch_case_indent = true
space_redirects = true
function_next_line = false

# also bash scripts.
[{install,uninstall,bin/fzf-preview.sh,bin/fzf-tmux}]
indent_style = space
indent_size = 2
simplify = true
binary_next_line = false
switch_case_indent = true
space_redirects = true
function_next_line = false
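
The `simplify`, `binary_next_line`, `switch_case_indent`, `space_redirects`, and `function_next_line` keys are not part of the core EditorConfig spec; they are extensions read by shfmt, the shell formatter the CI workflows below install. As a rough local equivalent of these settings (a sketch; the flag mapping is assumed from shfmt's documented options, not taken from this diff):

```sh
# Diff-check the shell scripts with formatting close to the .editorconfig above:
#   -i 2  two-space indent     -s   simplify the code
#   -ci   indent switch cases  -sr  space after redirect operators
#   -d    print a diff instead of rewriting the files
shfmt -i 2 -s -ci -sr -d install uninstall bin/fzf-preview.sh bin/fzf-tmux
```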

.github/FUNDING.yml (new file, 1 line)
@@ -0,0 +1 @@
github: junegunn

.github/ISSUE_TEMPLATE.md (deleted, 30 lines)
@@ -1,30 +0,0 @@
<!-- Check all that apply [x] -->
- Category
  - [ ] fzf binary
  - [ ] fzf-tmux script
  - [ ] Key bindings
  - [ ] Completion
  - [ ] Vim
  - [ ] Neovim
  - [ ] Etc.
- OS
  - [ ] Linux
  - [ ] Mac OS X
  - [ ] Windows
  - [ ] Windows Subsystem for Linux
  - [ ] Etc.
- Shell
  - [ ] bash
  - [ ] zsh
  - [ ] fish

<!--
### Before submitting

- Make sure that you have the latest version of fzf
- If you use tmux, make sure $TERM is set to screen or screen-256color
- For more Vim stuff, check out https://github.com/junegunn/fzf.vim

Describe your problem or suggestion from here ...
-->

.github/ISSUE_TEMPLATE/issue_template.yml (new file, 49 lines)
@@ -0,0 +1,49 @@
---
name: Issue Template
description: Report a problem or bug related to fzf to help us improve

body:
  - type: markdown
    attributes:
      value: ISSUES NOT FOLLOWING THIS TEMPLATE WILL BE CLOSED AND DELETED

  - type: checkboxes
    attributes:
      label: Checklist
      options:
        - label: I have read through the manual page (`man fzf`)
          required: true
        - label: I have searched through the existing issues
          required: true
        - label: For bug reports, I have checked if the bug is reproducible in the latest version of fzf
          required: false

  - type: input
    attributes:
      label: Output of `fzf --version`
      placeholder: e.g. 0.48.1 (d579e33)
    validations:
      required: true

  - type: checkboxes
    attributes:
      label: OS
      options:
        - label: Linux
        - label: macOS
        - label: Windows
        - label: Etc.

  - type: checkboxes
    attributes:
      label: Shell
      options:
        - label: bash
        - label: zsh
        - label: fish

  - type: textarea
    attributes:
      label: Problem / Steps to reproduce
    validations:
      required: true

.github/dependabot.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
version: 2
updates:
  - package-ecosystem: "gomod"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

.github/workflows/codeql-analysis.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning
name: CodeQL

on:
  push:
    branches: [ master, devel ]
  pull_request:
    branches: [ master ]
  workflow_dispatch:

permissions:
  contents: read

jobs:
  analyze:
    permissions:
      actions: read # for github/codeql-action/init to get workflow details
      contents: read # for actions/checkout to fetch code
      security-events: write # for github/codeql-action/autobuild to send a status report
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: ['go']

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v4
        with:
          languages: ${{ matrix.language }}

      - name: Autobuild
        uses: github/codeql-action/autobuild@v4

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v4

.github/workflows/depsreview.yaml (new file, 14 lines)
@@ -0,0 +1,14 @@
name: 'Dependency Review'
on: [pull_request]

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v5
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v4

.github/workflows/linux.yml (new file, 48 lines)
@@ -0,0 +1,48 @@
---
name: build

on:
  push:
    branches: [ master, devel ]
  pull_request:
    branches: [ master ]
  workflow_dispatch:

permissions:
  contents: read

env:
  LANG: C.UTF-8

jobs:
  build:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v6
        with:
          go-version: "1.23"

      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.4.6

      - name: Install packages
        run: sudo apt-get install --yes zsh fish tmux shfmt

      - name: Install Ruby gems
        run: bundle install

      - name: Rubocop
        run: make lint

      - name: Unit test
        run: make test

      - name: Integration test
        run: make install && ./install --all && tmux new-session -d && ruby test/runner.rb --verbose
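
The Linux job can be approximated outside CI by chaining the workflow's own run commands; the sketch below assumes Go, Ruby with Bundler, zsh, fish, tmux, and shfmt are already installed locally.

```sh
# Local approximation of the Linux CI job (commands taken from the run: steps above)
bundle install                     # Ruby test dependencies
make lint                          # Rubocop
make test                          # Go unit tests
make install && ./install --all    # build fzf and set up shell integration
tmux new-session -d                # the integration suite drives fzf inside tmux
ruby test/runner.rb --verbose
```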

.github/workflows/macos.yml (new file, 45 lines)
@@ -0,0 +1,45 @@
---
name: Test fzf on macOS

on:
  push:
    branches: [ master, devel ]
  pull_request:
    branches: [ master ]
  workflow_dispatch:

permissions:
  contents: read

jobs:
  build:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v6
        with:
          go-version: "1.23"

      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.0.0

      - name: Install packages
        run: HOMEBREW_NO_INSTALL_CLEANUP=1 brew install fish zsh tmux shfmt

      - name: Install Ruby gems
        run: gem install --no-document minitest:5.14.2 rubocop:1.0.0 rubocop-minitest:0.10.1 rubocop-performance:1.8.1

      - name: Rubocop
        run: rubocop --require rubocop-minitest --require rubocop-performance

      - name: Unit test
        run: make test

      - name: Integration test
        run: make install && ./install --all && LC_ALL=C tmux new-session -d && ruby test/test_go.rb --verbose

.github/workflows/sponsors.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
---
name: Generate Sponsors README
on:
  workflow_dispatch:
  schedule:
    - cron: 0 0 * * 0
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v5

      - name: Generate Sponsors 💖
        uses: JamesIves/github-sponsors-readme-action@v1
        with:
          token: ${{ secrets.SPONSORS_TOKEN }}
          file: 'README.md'

      - name: Deploy to GitHub Pages 🚀
        uses: JamesIves/github-pages-deploy-action@v4
        with:
          branch: master
          folder: '.'

.github/workflows/typos.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
name: "Spell Check"
on: [pull_request]

jobs:
  typos:
    name: Spell Check with Typos
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: crate-ci/typos@v1.29.4
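
The same check can be run before pushing, assuming the typos CLI from crate-ci/typos is installed locally (not part of this diff):

```sh
# Spell-check the working tree with the same tool the workflow uses
typos
```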

.github/workflows/winget.yml (new file, 14 lines)
@@ -0,0 +1,14 @@
name: Publish to Winget
on:
  release:
    types: [released]

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: vedantmgoyal2009/winget-releaser@v2
        with:
          identifier: junegunn.fzf
          installers-regex: '-windows_(armv7|arm64|amd64)\.zip$'
          token: ${{ secrets.WINGET_TOKEN }}
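
The `installers-regex` here is written to match the Windows zip archives produced by the `name_template` and `format_overrides` settings in `.goreleaser.yml` later in this diff. With an illustrative version number, the matched release assets would have names like:

```
fzf-0.48.1-windows_amd64.zip
fzf-0.48.1-windows_arm64.zip
fzf-0.48.1-windows_armv7.zip
```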

.gitignore (modified)
@@ -1,6 +1,14 @@
-bin
-src/fzf/fzf-*
-gopath
+bin/fzf
+bin/fzf.exe
+dist
+target
 pkg
-Gemfile.lock
 .DS_Store
+doc/tags
+vendor
+gopath
+*.zwc
+fzf
+tmp
+*.patch
+.idea

.goreleaser.yml (new file, 109 lines)
@@ -0,0 +1,109 @@
---
version: 2
project_name: fzf

before:
  hooks:
    - go mod download

builds:
  - id: fzf
    goos:
      - darwin
      - linux
      - windows
      - freebsd
      - openbsd
      - android
    goarch:
      - amd64
      - arm
      - arm64
      - loong64
      - ppc64le
      - s390x
    goarm:
      - "5"
      - "6"
      - "7"
    flags:
      - -trimpath
    ldflags:
      - "-s -w -X main.version={{ .Version }} -X main.revision={{ .ShortCommit }}"
    ignore:
      - goos: freebsd
        goarch: arm
      - goos: openbsd
        goarch: arm
      - goos: freebsd
        goarch: arm64
      - goos: openbsd
        goarch: arm64
      - goos: android
        goarch: amd64
      - goos: android
        goarch: arm

# .goreleaser.yaml
notarize:
  macos:
    - # Whether this configuration is enabled or not.
      #
      # Default: false.
      # Templates: allowed.
      enabled: "{{ not .IsSnapshot }}"

      # Before notarizing, we need to sign the binary.
      # This blocks defines the configuration for doing so.
      sign:
        # The .p12 certificate file path or its base64'd contents.
        certificate: "{{.Env.MACOS_SIGN_P12}}"

        # The password to be used to open the certificate.
        password: "{{.Env.MACOS_SIGN_PASSWORD}}"

      # Then, we notarize the binaries.
      notarize:
        # The issuer ID.
        # Its the UUID you see when creating the App Store Connect key.
        issuer_id: "{{.Env.MACOS_NOTARY_ISSUER_ID}}"

        # Key ID.
        # You can see it in the list of App Store Connect Keys.
        # It will also be in the ApiKey filename.
        key_id: "{{.Env.MACOS_NOTARY_KEY_ID}}"

        # The .p8 key file path or its base64'd contents.
        key: "{{.Env.MACOS_NOTARY_KEY}}"

        # Whether to wait for the notarization to finish.
        # Not recommended, as it could take a really long time.
        wait: true

archives:
  - name_template: "{{ .ProjectName }}-{{ .Version }}-{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}"
    builds:
      - fzf
    format: tar.gz
    format_overrides:
      - goos: windows
        format: zip
    files:
      - non-existent*

release:
  github:
    owner: junegunn
    name: fzf
  prerelease: auto
  name_template: '{{ .Version }}'

snapshot:
  name_template: "{{ .Version }}-devel"

changelog:
  sort: asc
  filters:
    exclude:
      - README
      - test
42  .rubocop.yml  Normal file
@@ -0,0 +1,42 @@
AllCops:
  NewCops: enable
Layout/LineLength:
  Enabled: false
Metrics:
  Enabled: false
Lint/ShadowingOuterLocalVariable:
  Enabled: false
Lint/NestedMethodDefinition:
  Enabled: false
Style/MethodCallWithArgsParentheses:
  Enabled: true
  AllowedMethods:
    - assert
    - exit
    - paste
    - puts
    - raise
    - refute
    - require
    - send_keys
  AllowedPatterns:
    - ^assert_
    - ^refute_
Style/NumericPredicate:
  Enabled: false
Style/StringConcatenation:
  Enabled: false
Style/OptionalBooleanParameter:
  Enabled: false
Style/WordArray:
  MinSize: 1
Minitest/AssertEqual:
  Enabled: false
Minitest/EmptyLineBeforeAssertionMethods:
  Enabled: false
Naming/VariableNumber:
  Enabled: false
Lint/EmptyBlock:
  Enabled: false
Style/SafeNavigationChainLength:
  Enabled: false
3  .tool-versions  Normal file
@@ -0,0 +1,3 @@
golang 1.23
ruby 3.4
shfmt 3.12
28  .travis.yml
@@ -1,28 +0,0 @@
language: ruby
matrix:
  include:
    - env: TAGS=
      rvm: 2.3.3
    # - env: TAGS=tcell
    #   rvm: 2.2.0

install:
  - sudo apt-get update
  - sudo apt-get install -y libncurses-dev lib32ncurses5-dev libgpm-dev
  - sudo add-apt-repository -y ppa:pi-rho/dev
  - sudo apt-add-repository -y ppa:fish-shell/release-2
  - sudo apt-get update
  - sudo apt-get install -y tmux=1.9a-1~ppa1~p
  - sudo apt-get install -y zsh fish

script: |
  export GOPATH=~/go
  export FZF_BASE=$GOPATH/src/github.com/junegunn/fzf

  mkdir -p $GOPATH/src/github.com/junegunn
  ln -s $(pwd) $FZF_BASE

  cd $FZF_BASE/src && make test fzf/fzf-linux_amd64 install &&
  cd $FZF_BASE/bin && ln -sf fzf-linux_amd64 fzf-$(./fzf --version)-linux_amd64 &&
  cd $FZF_BASE && yes | ./install && rm -f fzf &&
  tmux new "ruby test/test_go.rb > out && touch ok" && cat out && [ -e ok ]
714  ADVANCED.md  Normal file
@@ -0,0 +1,714 @@
Advanced fzf examples
======================

* *Last update: 2025/02/02*
* *Requires fzf 0.59.0 or later*

---

<!-- vim-markdown-toc GFM -->

* [Introduction](#introduction)
* [Display modes](#display-modes)
    * [`--height`](#--height)
    * [`--tmux`](#--tmux)
* [Dynamic reloading of the list](#dynamic-reloading-of-the-list)
    * [Updating the list of processes by pressing CTRL-R](#updating-the-list-of-processes-by-pressing-ctrl-r)
    * [Toggling between data sources](#toggling-between-data-sources)
    * [Toggling with a single key binding](#toggling-with-a-single-key-binding)
* [Ripgrep integration](#ripgrep-integration)
    * [Using fzf as the secondary filter](#using-fzf-as-the-secondary-filter)
    * [Using fzf as interactive Ripgrep launcher](#using-fzf-as-interactive-ripgrep-launcher)
    * [Switching to fzf-only search mode](#switching-to-fzf-only-search-mode)
    * [Switching between Ripgrep mode and fzf mode](#switching-between-ripgrep-mode-and-fzf-mode)
    * [Switching between Ripgrep mode and fzf mode using a single key binding](#switching-between-ripgrep-mode-and-fzf-mode-using-a-single-key-binding)
    * [Controlling Ripgrep search and fzf search simultaneously](#controlling-ripgrep-search-and-fzf-search-simultaneously)
* [Log tailing](#log-tailing)
* [Key bindings for git objects](#key-bindings-for-git-objects)
    * [Files listed in `git status`](#files-listed-in-git-status)
    * [Branches](#branches)
    * [Commit hashes](#commit-hashes)
* [Color themes](#color-themes)
    * [fzf Theme Playground](#fzf-theme-playground)
    * [Generating fzf color theme from Vim color schemes](#generating-fzf-color-theme-from-vim-color-schemes)

<!-- vim-markdown-toc -->

Introduction
------------

fzf is an interactive [Unix filter][filter] program that is designed to be
used with other Unix tools. It reads a list of items from the standard input,
allows you to select a subset of the items, and prints the selected ones to
the standard output. You can think of it as an interactive version of *grep*,
and it's already useful even if you don't know any of its options.

```sh
# 1. ps:   Feed the list of processes to fzf
# 2. fzf:  Interactively select a process using fuzzy matching algorithm
# 3. awk:  Take the PID from the selected line
# 4. kill: Kill the process with the PID
ps -ef | fzf | awk '{print $2}' | xargs kill -9
```

[filter]: https://en.wikipedia.org/wiki/Filter_(software)

While the above example succinctly summarizes the fundamental concept of fzf,
you can build much more sophisticated interactive workflows using fzf once you
learn its wide variety of features.

- To see the full list of options and features, see `man fzf`
- To see the latest additions, see [CHANGELOG.md](CHANGELOG.md)

This document will guide you through some examples that will familiarize you
with the advanced features of fzf.

Display modes
-------------

### `--height`

fzf by default opens in fullscreen mode, but it's not always desirable.
Oftentimes, you want to see the current context of the terminal while using
fzf. `--height` is an option for opening fzf below the cursor in
non-fullscreen mode so you can still see the previous commands and their
results above it.

```sh
fzf --height=40%
```

![image](https://user-images.githubusercontent.com/700826/113379893-c184c680-93b5-11eb-9676-c7b0efe31870.png)

You might also want to experiment with other layout options such as
`--layout=reverse`, `--info=inline`, `--border`, `--margin`, etc.

```sh
fzf --height=40% --layout=reverse
fzf --height=40% --layout=reverse --info=inline
fzf --height=40% --layout=reverse --info=inline --border
fzf --height=40% --layout=reverse --info=inline --border --margin=1
fzf --height=40% --layout=reverse --info=inline --border --margin=1 --padding=1
```

![image](https://user-images.githubusercontent.com/700826/113380185-1a546f00-93b6-11eb-83e4-a10a6af2ecbe.png)

*(See man page to see the full list of options)*

But you definitely don't want to repeat `--height=40% --layout=reverse
--info=inline --border --margin=1 --padding=1` every time you use fzf. You
could write a wrapper script or shell alias, but there is an easier option.
Define `$FZF_DEFAULT_OPTS` like so:

```sh
export FZF_DEFAULT_OPTS="--height=40% --layout=reverse --info=inline --border --margin=1 --padding=1"
```

### `--tmux`

(Requires tmux 3.3 or later)

If you're using tmux, you can open fzf in a tmux popup using `--tmux` option.

```sh
# Open fzf in a tmux popup at the center of the screen with 70% width and height
fzf --tmux 70%
```

![image](https://github.com/junegunn/fzf/assets/700826/9c365405-c700-49d5-a579-3b9a6a47a209)

`--tmux` option is silently ignored if you're not on tmux. So if you're trying
to avoid opening fzf in fullscreen, try specifying both `--height` and `--tmux`.

```sh
# --tmux is specified later so it takes precedence over --height when on tmux.
# If you're not on tmux, --tmux is ignored and --height is used instead.
fzf --height 70% --tmux 70%
```

You can also specify the position, width, and height of the popup window in
the following format:

* `[center|top|bottom|left|right][,SIZE[%]][,SIZE[%][,border-native]]`

```sh
# 100% width and 60% height
fzf --tmux 100%,60% --border horizontal
```

![image](https://github.com/junegunn/fzf/assets/700826/df4c04ef-b9bc-42c3-9491-b74a2d6cf4dc)

```sh
# On the right (50% width)
fzf --tmux right
```

![image](https://github.com/junegunn/fzf/assets/700826/1a34b72d-08b5-4bbf-8a01-a1e150c47332)

```sh
# On the left (40% width and 70% height)
fzf --tmux left,40%,70%
```

![image](https://github.com/junegunn/fzf/assets/700826/9db2aa14-bc2c-4a63-88f0-cc1e5e24d4ae)

> [!TIP]
> You might also want to check out my tmux plugins which support this popup
> window layout.
>
> - https://github.com/junegunn/tmux-fzf-url
> - https://github.com/junegunn/tmux-fzf-maccy

Dynamic reloading of the list
-----------------------------

fzf can dynamically update the candidate list using an arbitrary program with
`reload` bindings (The design document for `reload` can be found
[here][reload]).

[reload]: https://github.com/junegunn/fzf/issues/1750

### Updating the list of processes by pressing CTRL-R

This example shows how you can set up a binding for dynamically updating the
list without restarting fzf.

```sh
(date; ps -ef) |
  fzf --bind='ctrl-r:reload(date; ps -ef)' \
      --header=$'Press CTRL-R to reload\n\n' --header-lines=2 \
      --preview='echo {}' --preview-window=down,3,wrap \
      --layout=reverse --height=80% | awk '{print $2}' | xargs kill -9
```

![image](https://user-images.githubusercontent.com/700826/113465047-200c7c00-946c-11eb-918c-268f37a900c8.png)

- The initial command is `(date; ps -ef)`. It prints the current date and
  time, and the list of the processes.
- With `--header` option, you can show any message as the fixed header.
- To disallow selecting the first two lines (`date` and `ps` header), we use
  `--header-lines=2` option.
- `--bind='ctrl-r:reload(date; ps -ef)'` binds CTRL-R to `reload` action that
  runs `date; ps -ef`, so we can update the list of the processes by pressing
  CTRL-R.
- We use simple `echo {}` preview option, so we can see the entire line on the
  preview window below even if it's too long.

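The same `reload` pattern works with any command that prints a list. As a quick
variation (a sketch, not part of the original document; it assumes a `df` that
prints a single header line), the binding below refreshes a disk-usage listing
instead of the process list:

```sh
# Press CTRL-R to refresh the disk-usage listing; the `date` line and the df
# header stay fixed and unselectable thanks to --header-lines=2
(date; df -h) |
  fzf --bind='ctrl-r:reload(date; df -h)' \
      --header=$'Press CTRL-R to refresh\n\n' --header-lines=2 \
      --preview='echo {}' --preview-window=down,3,wrap
```
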
### Toggling between data sources

You're not limited to just one reload binding. Set up multiple bindings so
you can switch between data sources.

```sh
find * | fzf --prompt 'All> ' \
             --header 'CTRL-D: Directories / CTRL-F: Files' \
             --bind 'ctrl-d:change-prompt(Directories> )+reload(find * -type d)' \
             --bind 'ctrl-f:change-prompt(Files> )+reload(find * -type f)'
```

![image](https://user-images.githubusercontent.com/700826/113465073-2a2e7a80-946c-11eb-8cd2-c488c68d600d.png)

![image](https://user-images.githubusercontent.com/700826/113465072-2995e400-946c-11eb-96e9-7fa3529b6f70.png)

### Toggling with a single key binding

The above example uses two different key bindings to toggle between two modes,
but can we just use a single key binding?

To make a key binding behave differently each time it is pressed, we need:

1. a way to store the current state. i.e. "which mode are we in?"
2. and a way to dynamically perform different actions depending on the state.

The following example shows how to 1. store the current mode in the prompt
string, 2. and use this information (`$FZF_PROMPT`) to determine which
actions to perform using the `transform` action.

```sh
fd --type file |
  fzf --prompt 'Files> ' \
      --header 'CTRL-T: Switch between Files/Directories' \
      --bind 'ctrl-t:transform:[[ ! $FZF_PROMPT =~ Files ]] &&
              echo "change-prompt(Files> )+reload(fd --type file)" ||
              echo "change-prompt(Directories> )+reload(fd --type directory)"' \
      --preview '[[ $FZF_PROMPT =~ Files ]] && bat --color=always {} || tree -C {}'
```

Ripgrep integration
-------------------

### Using fzf as the secondary filter

* Requires [bat][bat]
* Requires [Ripgrep][rg]

[bat]: https://github.com/sharkdp/bat
[rg]: https://github.com/BurntSushi/ripgrep

fzf is fast enough at filtering a list that you will rarely have to think
about its performance. But it is not the right tool for searching for text
inside many large files, and in that case you should definitely use something
like [Ripgrep][rg].

In the next example, Ripgrep is the primary filter that searches for the given
text in files, and fzf is used as the secondary fuzzy filter that adds
interactivity to the workflow. And we use [bat][bat] to show the matching line in
the preview window.

This is a bash script and it will not run as expected on other non-compliant
shells. To avoid the compatibility issue, let's save this snippet as a script
file called `rfv`.

```bash
#!/usr/bin/env bash

# 1. Search for text in files using Ripgrep
# 2. Interactively narrow down the list using fzf
# 3. Open the file in Vim
rg --color=always --line-number --no-heading --smart-case "${*:-}" |
  fzf --ansi \
      --color "hl:-1:underline,hl+:-1:underline:reverse" \
      --delimiter : \
      --preview 'bat --color=always {1} --highlight-line {2}' \
      --preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
      --bind 'enter:become(vim {1} +{2})'
```

And run it with an initial query string.

```sh
# Make the script executable
chmod +x rfv

# Run it with the initial query "algo"
./rfv algo
```

> Ripgrep will perform the initial search and list all the lines that contain
> `algo`. Then we further narrow down the list on fzf.

![image](https://user-images.githubusercontent.com/700826/113683873-a42a6200-96ff-11eb-9666-26ce4091b0e4.png)

I know it's a lot to digest, let's try to break down the code.

- Ripgrep prints the matching lines in the following format
  ```
  man/man1/fzf.1:54:.BI "--algo=" TYPE
  man/man1/fzf.1:55:Fuzzy matching algorithm (default: v2)
  man/man1/fzf.1:58:.BR v2 " Optimal scoring algorithm (quality)"
  src/pattern_test.go:7:	"github.com/junegunn/fzf/src/algo"
  ```
  The first token delimited by `:` is the file path, and the second token is
  the line number of the matching line. They respectively correspond to `{1}`
  and `{2}` in the preview command.
    - `--preview 'bat --color=always {1} --highlight-line {2}'`
- As we run `rg` with `--color=always` option, we should tell fzf to parse
  ANSI color codes in the input by setting `--ansi`.
- We customize how fzf colors various text elements using `--color` option.
  `-1` tells fzf to keep the original color from the input. See `man fzf` for
  available color options.
- The value of `--preview-window` option consists of 5 components delimited
  by `,` (a standalone sketch of this syntax follows after this list)
  1. `up` — Position of the preview window
  1. `60%` — Size of the preview window
  1. `border-bottom` — Preview window border only on the bottom side
  1. `+{2}+3/3` — Scroll offset of the preview contents
  1. `~3` — Fixed header
- Let's break down the latter two. We want to display the bat output in the
  preview window with a certain scroll offset so that the matching line is
  positioned near the center of the preview window.
    - `+{2}` — The base offset is extracted from the second token
    - `+3` — We add 3 lines to the base offset to compensate for the header
      part of `bat` output
        - ```
          ───────┬──────────────────────────────────────────────────────────
                 │ File: CHANGELOG.md
          ───────┼──────────────────────────────────────────────────────────
             1   │ CHANGELOG
             2   │ =========
             3   │
             4   │ 0.26.0
             5   │ ------
          ```
    - `/3` adjusts the offset so that the matching line is shown at a third
      position in the window
    - `~3` makes the top three lines fixed header so that they are always
      visible regardless of the scroll offset
- Instead of using shell script to process the final output of fzf, we use
  `become(...)` action which was added in [fzf 0.38.0][0.38.0] to turn fzf
  into a new process that opens the file with `vim` (`vim {1}`) and moves the
  cursor to the line (`+{2}`).

[0.38.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0380

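To see the offset syntax on its own, here is a minimal sketch (not from the
original document; line 100 is an arbitrary stand-in for `{2}`, and `bat` is
assumed to be installed):

```sh
# +100 : scroll the preview to line 100 (a hard-coded stand-in for {2})
# +3   : compensate for the three header lines printed by bat
# /3   : place the target line at a third of the preview window height
# ~3   : keep the top three lines visible as a fixed header
echo CHANGELOG.md |
  fzf --preview 'bat --color=always --highlight-line 100 {}' \
      --preview-window 'up,60%,border-bottom,+100+3/3,~3'
```
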
### Using fzf as interactive Ripgrep launcher

We have learned that we can bind `reload` action to a key (e.g.
`--bind=ctrl-r:reload(ps -ef)`). In the next example, we are going to **bind
`reload` action to `change` event** so that whenever the user *changes* the
query string on fzf, `reload` action is triggered.

Here is a variation of the above `rfv` script. fzf will restart Ripgrep every
time the user updates the query string on fzf. Searching and filtering is
completely done by Ripgrep, and fzf merely provides the interactive interface.
So we lose the "fuzziness", but the performance will be better on larger
projects, and it will free up memory as you narrow down the results.

```bash
#!/usr/bin/env bash

# 1. Search for text in files using Ripgrep
# 2. Interactively restart Ripgrep with reload action
# 3. Open the file in Vim
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
INITIAL_QUERY="${*:-}"
fzf --ansi --disabled --query "$INITIAL_QUERY" \
    --bind "start:reload:$RG_PREFIX {q}" \
    --bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
    --delimiter : \
    --preview 'bat --color=always {1} --highlight-line {2}' \
    --preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
    --bind 'enter:become(vim {1} +{2})'
```

![image](https://user-images.githubusercontent.com/700826/113684212-f9ff0a00-96ff-11eb-8737-7bb571d320cc.png)

- Instead of starting fzf in the usual `rg ... | fzf` form, we make it start
  the initial Ripgrep process immediately via `start:reload` binding for the
  consistency of the code.
- Filtering is no longer a responsibility of fzf; hence `--disabled`.
- `{q}` in the reload command evaluates to the query string on fzf prompt.
- `sleep 0.1` in the reload command is for "debouncing". This small delay will
  reduce the number of intermediate Ripgrep processes while we're typing in
  a query.

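The debouncing idea is easier to see with a cheap data source. In the sketch
below (an illustration only; `seq` and `grep` stand in for Ripgrep), every
keystroke schedules a reload, but the short sleep lets rapid keystrokes
supersede the previous one before the actual command runs:

```sh
# fzf filtering is disabled, so the list is exactly what grep returns for {q}
seq 10000 |
  fzf --disabled \
      --bind 'change:reload:sleep 0.1; seq 10000 | grep -- {q} || true'
```
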
### Switching to fzf-only search mode

In the previous example, we lost fuzzy matching capability as we completely
delegated search functionality to Ripgrep. But we can dynamically switch to
fzf-only search mode by *"unbinding"* `reload` action from `change` event.

```sh
#!/usr/bin/env bash

# Two-phase filtering with Ripgrep and fzf
#
# 1. Search for text in files using Ripgrep
# 2. Interactively restart Ripgrep with reload action
#    * Press alt-enter to switch to fzf-only filtering
# 3. Open the file in Vim
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
INITIAL_QUERY="${*:-}"
fzf --ansi --disabled --query "$INITIAL_QUERY" \
    --bind "start:reload:$RG_PREFIX {q}" \
    --bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
    --bind "alt-enter:unbind(change,alt-enter)+change-prompt(2. fzf> )+enable-search+clear-query" \
    --color "hl:-1:underline,hl+:-1:underline:reverse" \
    --prompt '1. ripgrep> ' \
    --delimiter : \
    --preview 'bat --color=always {1} --highlight-line {2}' \
    --preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
    --bind 'enter:become(vim {1} +{2})'
```

* Phase 1. Filtering with Ripgrep
  ![image](https://user-images.githubusercontent.com/700826/113684612-600e2080-9700-11eb-9b0e-c5c3aa6acc6b.png)
* Phase 2. Filtering with fzf
  ![image](https://user-images.githubusercontent.com/700826/113684615-61d7e400-9700-11eb-9d70-0083001c633a.png)

- We added `--prompt` option to show that fzf is initially running in "Ripgrep
  launcher mode".
- We added `alt-enter` binding that
  1. unbinds `change` event, so Ripgrep is no longer restarted on key press
  2. changes the prompt to `2. fzf>`
  3. enables search functionality of fzf
  4. clears the current query string that was used to start Ripgrep process
  5. and unbinds `alt-enter` itself as this is a one-off event
- We reverted `--color` option for customizing how the matching chunks are
  displayed in the second phase.

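The unbinding idea also works in isolation. In this hypothetical sketch (not
part of the original script), every keystroke reloads a timestamped list until
ALT-ENTER removes the `change` binding (and itself) and hands filtering back
to fzf:

```sh
fzf --disabled \
    --bind 'start,change:reload(date; seq 100)' \
    --bind 'alt-enter:unbind(change,alt-enter)+enable-search+clear-query'
```
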
### Switching between Ripgrep mode and fzf mode

[fzf 0.30.0][0.30.0] added `rebind` action so we can "rebind" the bindings
that were previously "unbound" via `unbind`.

This is an improved version of the previous example that allows us to switch
between Ripgrep launcher mode and fzf-only filtering mode via CTRL-R and
CTRL-F.

```sh
#!/usr/bin/env bash

# Switch between Ripgrep launcher mode (CTRL-R) and fzf filtering mode (CTRL-F)
rm -f /tmp/rg-fzf-{r,f}
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
INITIAL_QUERY="${*:-}"
fzf --ansi --disabled --query "$INITIAL_QUERY" \
    --bind "start:reload($RG_PREFIX {q})+unbind(ctrl-r)" \
    --bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
    --bind "ctrl-f:unbind(change,ctrl-f)+change-prompt(2. fzf> )+enable-search+rebind(ctrl-r)+transform-query(echo {q} > /tmp/rg-fzf-r; cat /tmp/rg-fzf-f)" \
    --bind "ctrl-r:unbind(ctrl-r)+change-prompt(1. ripgrep> )+disable-search+reload($RG_PREFIX {q} || true)+rebind(change,ctrl-f)+transform-query(echo {q} > /tmp/rg-fzf-f; cat /tmp/rg-fzf-r)" \
    --color "hl:-1:underline,hl+:-1:underline:reverse" \
    --prompt '1. ripgrep> ' \
    --delimiter : \
    --header '╱ CTRL-R (ripgrep mode) ╱ CTRL-F (fzf mode) ╱' \
    --preview 'bat --color=always {1} --highlight-line {2}' \
    --preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
    --bind 'enter:become(vim {1} +{2})'
```

- To restore the query string when switching between modes, we store the
  current query in `/tmp/rg-fzf-{r,f}` files and restore the query using
  `transform-query` action which was added in [fzf 0.36.0][0.36.0].
- Also note that we unbind `ctrl-r` binding on `start` event which is
  triggered once when fzf starts.

[0.30.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0300
[0.36.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0360

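Outside of this script, `transform-query` is easy to try on its own. In this
tiny, hypothetical example (not from the original document), pressing CTRL-U
replaces the current query with its upper-cased form:

```sh
printf '%s\n' foo FOO bar BAR |
  fzf --bind 'ctrl-u:transform-query(echo {q} | tr a-z A-Z)'
```
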
### Switching between Ripgrep mode and fzf mode using a single key binding

In contrast to the previous version, we use just one hotkey to toggle between
ripgrep and fzf mode. This is achieved by using the `$FZF_PROMPT` as a state
within the `transform` action, a feature introduced in [fzf 0.45.0][0.45.0]. A
more detailed explanation of this feature can be found in a previous section -
[Toggling with a single key binding](#toggling-with-a-single-key-binding).

[0.45.0]: https://github.com/junegunn/fzf/blob/master/CHANGELOG.md#0450

When using the `transform` action, the placeholder (`\{q}`) should be escaped to
prevent immediate evaluation.

```sh
#!/usr/bin/env bash

# Switch between Ripgrep mode and fzf filtering mode (CTRL-T)
rm -f /tmp/rg-fzf-{r,f}
RG_PREFIX="rg --column --line-number --no-heading --color=always --smart-case "
INITIAL_QUERY="${*:-}"
fzf --ansi --disabled --query "$INITIAL_QUERY" \
    --bind "start:reload:$RG_PREFIX {q}" \
    --bind "change:reload:sleep 0.1; $RG_PREFIX {q} || true" \
    --bind 'ctrl-t:transform:[[ ! $FZF_PROMPT =~ ripgrep ]] &&
      echo "rebind(change)+change-prompt(1. ripgrep> )+disable-search+transform-query:echo \{q} > /tmp/rg-fzf-f; cat /tmp/rg-fzf-r" ||
      echo "unbind(change)+change-prompt(2. fzf> )+enable-search+transform-query:echo \{q} > /tmp/rg-fzf-r; cat /tmp/rg-fzf-f"' \
    --color "hl:-1:underline,hl+:-1:underline:reverse" \
    --prompt '1. ripgrep> ' \
    --delimiter : \
    --header 'CTRL-T: Switch between ripgrep/fzf' \
    --preview 'bat --color=always {1} --highlight-line {2}' \
    --preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
    --bind 'enter:become(vim {1} +{2})'
```

### Controlling Ripgrep search and fzf search simultaneously

`search` and `transform-search` actions allow you to trigger an fzf search with
an arbitrary query string. This frees fzf from strictly following the prompt
input, enabling custom search syntax.

In the example below, `transform` action is used to conditionally trigger
`reload` for ripgrep, followed by `search` for fzf. The first word of the
query initiates the Ripgrep process to generate the initial results, while the
remainder of the query is passed to fzf for secondary filtering.

```sh
#!/usr/bin/env bash

export TEMP=$(mktemp -u)
trap 'rm -f "$TEMP"' EXIT

INITIAL_QUERY="${*:-}"
TRANSFORMER='
  rg_pat={q:1}      # The first word is passed to ripgrep
  fzf_pat={q:2..}   # The rest are passed to fzf

  if ! [[ -r "$TEMP" ]] || [[ $rg_pat != $(cat "$TEMP") ]]; then
    echo "$rg_pat" > "$TEMP"
    printf "reload:sleep 0.1; rg --column --line-number --no-heading --color=always --smart-case %q || true" "$rg_pat"
  fi
  echo "+search:$fzf_pat"
'
fzf --ansi --disabled --query "$INITIAL_QUERY" \
    --with-shell 'bash -c' \
    --bind "start,change:transform:$TRANSFORMER" \
    --color "hl:-1:underline,hl+:-1:underline:reverse" \
    --delimiter : \
    --preview 'bat --color=always {1} --highlight-line {2}' \
    --preview-window 'up,60%,border-line,+{2}+3/3,~3' \
    --bind 'enter:become(vim {1} +{2})'
```

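As a hypothetical usage example of the script above: a query such as
`controller static` makes Ripgrep produce the candidate lines matching
`controller`, while fzf narrows those candidates further with `static`.
Because the transformer only issues `reload` when the first word changes,
editing the rest of the query re-filters instantly without restarting Ripgrep.
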
Log tailing
-----------

fzf can run long-running preview commands and render partial results before
completion. And when you specify `follow` flag in `--preview-window` option,
fzf will "`tail -f`" the result, automatically scrolling to the bottom.

```bash
# With "follow", preview window will automatically scroll to the bottom.
# "\033[2J" is an ANSI escape sequence for clearing the screen.
# When fzf reads this code it clears the previous preview contents.
fzf --preview-window follow --preview 'for i in $(seq 100000); do
  echo "$i"
  sleep 0.01
  (( i % 300 == 0 )) && printf "\033[2J"
done'
```

![image](https://user-images.githubusercontent.com/700826/113473303-dd7d6600-94a3-11eb-88a9-1f61b996bb0e.png)

Admittedly, that was a silly example. Here's a practical one for browsing
Kubernetes pods.

```bash
pods() {
  command='kubectl get pods --all-namespaces' fzf \
    --info=inline --layout=reverse --header-lines=1 \
    --prompt "$(kubectl config current-context | sed 's/-context$//')> " \
    --header $'╱ Enter (kubectl exec) ╱ CTRL-O (open log in editor) ╱ CTRL-R (reload) ╱\n\n' \
    --bind 'start,ctrl-r:reload:$command' \
    --bind 'ctrl-/:change-preview-window(80%,border-bottom|hidden|)' \
    --bind 'enter:execute:kubectl exec -it --namespace {1} {2} -- bash' \
    --bind 'ctrl-o:execute:${EDITOR:-vim} <(kubectl logs --all-containers --namespace {1} {2})' \
    --preview-window up:follow \
    --preview 'kubectl logs --follow --all-containers --tail=10000 --namespace {1} {2}' "$@"
}
```

![image](https://user-images.githubusercontent.com/700826/113473547-1d485d00-94a6-11eb-98df-e8d1c2b1cdd3.png)

- The preview window will *"log tail"* the pod
    - Holding on to a large amount of log will consume a lot of memory. So we
      limited the initial log amount with `--tail=10000`.
- `execute` bindings allow you to run any command without leaving fzf
    - Press enter key on a pod to `kubectl exec` into it
    - Press CTRL-O to open the log in your editor
- Press CTRL-R to reload the pod list
- Press CTRL-/ repeatedly to rotate through different sets of preview
  window options
  1. `80%,border-bottom`
  1. `hidden`
  1. Empty string after `|` translates to the default options from `--preview-window`

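The preview-window rotation is also handy outside of Kubernetes. The sketch
below (an illustration only) applies the same `ctrl-/` binding to a plain file
listing:

```sh
ls | fzf --preview 'file {}' \
         --bind 'ctrl-/:change-preview-window(80%,border-bottom|hidden|)'
```
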
Key bindings for git objects
----------------------------

Oftentimes, you want to put the identifiers of various Git objects on the
command-line. For example, it is common to write commands like these:

```sh
git checkout [SOME_COMMIT_HASH or BRANCH or TAG]
git diff [SOME_COMMIT_HASH or BRANCH or TAG] [SOME_COMMIT_HASH or BRANCH or TAG]
```

[fzf-git.sh](https://github.com/junegunn/fzf-git.sh) project defines a set of
fzf-based key bindings for Git objects. I strongly recommend that you check
them out because they are seriously useful.

### Files listed in `git status`

<kbd>CTRL-G</kbd><kbd>CTRL-F</kbd>

![image](https://user-images.githubusercontent.com/700826/113473779-a9a74f80-94a7-11eb-83e6-4b3c56a94adf.png)

### Branches

<kbd>CTRL-G</kbd><kbd>CTRL-B</kbd>

![image](https://user-images.githubusercontent.com/700826/113473758-87adcd00-94a7-11eb-82f4-9218103a10bd.png)

### Commit hashes

<kbd>CTRL-G</kbd><kbd>CTRL-H</kbd>

![image](https://user-images.githubusercontent.com/700826/113473765-91373500-94a7-11eb-8d38-ed4d41f27ac1.png)

Color themes
------------

You can customize how fzf colors the text elements with `--color` option. Here
are a few color themes. Note that you need a terminal emulator that can
display 24-bit colors.

```sh
# junegunn/seoul256.vim (dark)
export FZF_DEFAULT_OPTS='--color=bg+:#3F3F3F,bg:#4B4B4B,border:#6B6B6B,spinner:#98BC99,hl:#719872,fg:#D9D9D9,header:#719872,info:#BDBB72,pointer:#E12672,marker:#E17899,fg+:#D9D9D9,preview-bg:#3F3F3F,prompt:#98BEDE,hl+:#98BC99'
```

![image](https://user-images.githubusercontent.com/700826/113475011-2c98f700-94b0-11eb-9d17-1e5867bae01f.png)

```sh
# junegunn/seoul256.vim (light)
export FZF_DEFAULT_OPTS='--color=bg+:#D9D9D9,bg:#E1E1E1,border:#C8C8C8,spinner:#719899,hl:#719872,fg:#616161,header:#719872,info:#727100,pointer:#E12672,marker:#E17899,fg+:#616161,preview-bg:#D9D9D9,prompt:#0099BD,hl+:#719899'
```

![image](https://user-images.githubusercontent.com/700826/113475022-389cb700-94b0-11eb-905f-0939dd535837.png)

```sh
# morhetz/gruvbox
export FZF_DEFAULT_OPTS='--color=bg+:#3c3836,bg:#32302f,spinner:#fb4934,hl:#928374,fg:#ebdbb2,header:#928374,info:#8ec07c,pointer:#fb4934,marker:#fb4934,fg+:#ebdbb2,prompt:#fb4934,hl+:#fb4934'
```

![image](https://user-images.githubusercontent.com/700826/113475042-494d2d00-94b0-11eb-9322-cd03a027305a.png)

```sh
# arcticicestudio/nord-vim
export FZF_DEFAULT_OPTS='--color=bg+:#3B4252,bg:#2E3440,spinner:#81A1C1,hl:#616E88,fg:#D8DEE9,header:#616E88,info:#81A1C1,pointer:#81A1C1,marker:#81A1C1,fg+:#D8DEE9,prompt:#81A1C1,hl+:#81A1C1'
```

![image](https://user-images.githubusercontent.com/700826/113475063-67b32880-94b0-11eb-9b71-aa8d852a9e53.png)

```sh
# tomasr/molokai
export FZF_DEFAULT_OPTS='--color=bg+:#293739,bg:#1B1D1E,border:#808080,spinner:#E6DB74,hl:#7E8E91,fg:#F8F8F2,header:#7E8E91,info:#A6E22E,pointer:#A6E22E,marker:#F92672,fg+:#F8F8F2,prompt:#F92672,hl+:#F92672'
```

![image](https://user-images.githubusercontent.com/700826/113475085-7f8aac80-94b0-11eb-91e8-84acec17024d.png)

### fzf Theme Playground

[fzf Theme Playground](https://vitormv.github.io/fzf-themes/) created by
[Vitor Mello](https://github.com/vitormv) is a webpage where you can
interactively create fzf themes.

### Generating fzf color theme from Vim color schemes

The Vim plugin of fzf can generate `--color` option from the current color
scheme according to `g:fzf_colors` variable. You can find the detailed
explanation [here](https://github.com/junegunn/fzf/blob/master/README-VIM.md#explanation-of-gfzf_colors).

Here is an example. Add this to your Vim configuration file.

```vim
let g:fzf_colors =
\ { 'fg':         ['fg', 'Normal'],
  \ 'bg':         ['bg', 'Normal'],
  \ 'preview-bg': ['bg', 'NormalFloat'],
  \ 'hl':         ['fg', 'Comment'],
  \ 'fg+':        ['fg', 'CursorLine', 'CursorColumn', 'Normal'],
  \ 'bg+':        ['bg', 'CursorLine', 'CursorColumn'],
  \ 'hl+':        ['fg', 'Statement'],
  \ 'info':       ['fg', 'PreProc'],
  \ 'border':     ['fg', 'Ignore'],
  \ 'prompt':     ['fg', 'Conditional'],
  \ 'pointer':    ['fg', 'Exception'],
  \ 'marker':     ['fg', 'Keyword'],
  \ 'spinner':    ['fg', 'Label'],
  \ 'header':     ['fg', 'Comment'] }
```

Then you can see how the `--color` option is generated by printing the result
of `fzf#wrap()`.

```vim
:echo fzf#wrap()
```

Use this command to append `export FZF_DEFAULT_OPTS="..."` line to the end of
the current file.

```vim
:call append('$', printf('export FZF_DEFAULT_OPTS="%s"', matchstr(fzf#wrap().options, "--color[^']*")))
```
99  BUILD.md
@@ -6,101 +6,70 @@ Build instructions
 
 ### Prerequisites
 
-- `go` executable in $PATH
+- Go 1.23 or above
 
 ### Using Makefile
 
-Makefile will set up and use its own `$GOPATH` under the project root.
-
 ```sh
-# Source files are located in src directory
-cd src
-
-# Build fzf binary for your platform in src/fzf
+# Build fzf binary for your platform in target
 make
 
 # Build fzf binary and copy it to bin directory
 make install
 
-# Build 32-bit and 64-bit executables and tarballs
+# Build fzf binaries and archives for all platforms using goreleaser
+make build
+
+# Publish GitHub release
 make release
-
-# Build executables and tarballs for Linux using Docker
-make linux
 ```
 
-### Using `go get`
-
-Alternatively, you can build fzf directly with `go get` command without
-cloning the repository.
-
-```sh
-go get -u github.com/junegunn/fzf/src/fzf
-```
-
-Build options
+> [!WARNING]
+> Makefile uses git commands to determine the version and the revision
+> information for `fzf --version`. So if you're building fzf from an
+> environment where its git information is not available, you have to manually
+> set `$FZF_VERSION` and `$FZF_REVISION`.
+>
+> e.g. `FZF_VERSION=0.24.0 FZF_REVISION=tarball make`
+
+> [!TIP]
+> To build fzf with profiling options enabled, set `TAGS=pprof`
+>
+> ```sh
+> TAGS=pprof make clean install
+> fzf --profile-cpu /tmp/cpu.pprof --profile-mem /tmp/mem.pprof \
+>     --profile-block /tmp/block.pprof --profile-mutex /tmp/mutex.pprof
+> ```
+
+Running tests
 -------------
 
-### With ncurses 6
-
-The official binaries of fzf are built with ncurses 5 because it's widely
-supported by different platforms. However ncurses 5 is old and has a number of
-limitations.
-
-1. Does not support more than 256 color pairs (See [357][357])
-2. Does not support italics
-3. Does not support 24-bit color
-
-[357]: https://github.com/junegunn/fzf/issues/357
-
-But you can manually build fzf with ncurses 6 to overcome some of these
-limitations. ncurses 6 supports up to 32767 color pairs (1), and supports
-italics (2). To build fzf with ncurses 6, you have to install it first. On
-macOS, you can use Homebrew to install it.
-
 ```sh
-brew install homebrew/dupes/ncurses
-LDFLAGS="-L/usr/local/opt/ncurses/lib" make install
+# Run go unit tests
+make test
+
+# Run integration tests (requires to be on tmux)
+make itest
+
+# Run a single test case
+ruby test/runner.rb --name test_something
 ```
 
-### With tcell
-
-[tcell][tcell] is a portable alternative to ncurses and we currently use it to
-build Windows binaries. tcell has many benefits but most importantly, it
-supports 24-bit colors. To build fzf with tcell:
-
-```sh
-TAGS=tcell make install
-```
-
-However, note that tcell has its own issues.
-
-- Poor rendering performance compared to ncurses
-- Does not support bracketed-paste mode
-- Does not support italics unlike ncurses 6
-- Some wide characters are not correctly displayed
-
 Third-party libraries used
 --------------------------
 
-- [ncurses][ncurses]
-- [mattn/go-runewidth](https://github.com/mattn/go-runewidth)
-  - Licensed under [MIT](http://mattn.mit-license.org)
+- [rivo/uniseg](https://github.com/rivo/uniseg)
+  - Licensed under [MIT](https://raw.githubusercontent.com/rivo/uniseg/master/LICENSE.txt)
 - [mattn/go-shellwords](https://github.com/mattn/go-shellwords)
   - Licensed under [MIT](http://mattn.mit-license.org)
 - [mattn/go-isatty](https://github.com/mattn/go-isatty)
   - Licensed under [MIT](http://mattn.mit-license.org)
 - [tcell](https://github.com/gdamore/tcell)
   - Licensed under [Apache License 2.0](https://github.com/gdamore/tcell/blob/master/LICENSE)
+- [fastwalk](https://github.com/charlievieth/fastwalk)
+  - Licensed under [MIT](https://raw.githubusercontent.com/charlievieth/fastwalk/master/LICENSE)
 
 License
 -------
 
 [MIT](LICENSE)
-
-[install]: https://github.com/junegunn/fzf#installation
-[go]: https://golang.org/
-[gil]: http://en.wikipedia.org/wiki/Global_Interpreter_Lock
-[ncurses]: https://www.gnu.org/software/ncurses/
-[req]: http://golang.org/doc/install
-[tcell]: https://github.com/gdamore/tcell
2522  CHANGELOG.md  (diff suppressed because it is too large)
12  Dockerfile  Normal file
@@ -0,0 +1,12 @@
FROM rubylang/ruby:3.4.1-noble
RUN apt-get update -y && apt install -y git make golang zsh fish tmux
RUN gem install --no-document -v 5.22.3 minitest
RUN echo '. /usr/share/bash-completion/completions/git' >> ~/.bashrc
RUN echo '. ~/.bashrc' >> ~/.bash_profile

# Do not set default PS1
RUN rm -f /etc/bash.bashrc
COPY . /fzf
RUN cd /fzf && make install && ./install --all
ENV LANG=C.UTF-8
CMD ["bash", "-ic", "tmux new 'set -o pipefail; ruby /fzf/test/runner.rb | tee out && touch ok' && cat out && [ -e ok ]"]
8  Gemfile  Normal file
@@ -0,0 +1,8 @@
# frozen_string_literal: true

source 'https://rubygems.org'

gem 'minitest', '5.25.4'
gem 'rubocop', '1.71.0'
gem 'rubocop-minitest', '0.36.0'
gem 'rubocop-performance', '1.23.1'
47  Gemfile.lock  Normal file
@@ -0,0 +1,47 @@
GEM
  remote: https://rubygems.org/
  specs:
    ast (2.4.2)
    json (2.9.1)
    language_server-protocol (3.17.0.3)
    minitest (5.25.4)
    parallel (1.26.3)
    parser (3.3.7.0)
      ast (~> 2.4.1)
      racc
    racc (1.8.1)
    rainbow (3.1.1)
    regexp_parser (2.10.0)
    rubocop (1.71.0)
      json (~> 2.3)
      language_server-protocol (>= 3.17.0)
      parallel (~> 1.10)
      parser (>= 3.3.0.2)
      rainbow (>= 2.2.2, < 4.0)
      regexp_parser (>= 2.9.3, < 3.0)
      rubocop-ast (>= 1.36.2, < 2.0)
      ruby-progressbar (~> 1.7)
      unicode-display_width (>= 2.4.0, < 4.0)
    rubocop-ast (1.37.0)
      parser (>= 3.3.1.0)
    rubocop-minitest (0.36.0)
      rubocop (>= 1.61, < 2.0)
      rubocop-ast (>= 1.31.1, < 2.0)
    rubocop-performance (1.23.1)
      rubocop (>= 1.48.1, < 2.0)
      rubocop-ast (>= 1.31.1, < 2.0)
    ruby-progressbar (1.13.0)
    unicode-display_width (2.6.0)

PLATFORMS
  arm64-darwin-23
  ruby

DEPENDENCIES
  minitest (= 5.25.4)
  rubocop (= 1.71.0)
  rubocop-minitest (= 0.36.0)
  rubocop-performance (= 1.23.1)

BUNDLED WITH
   2.6.2
2  LICENSE
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2016 Junegunn Choi
+Copyright (c) 2013-2025 Junegunn Choi
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
206  Makefile  Normal file
@@ -0,0 +1,206 @@
GO     ?= go
GOOS   ?= $(shell $(GO) env GOOS)

MAKEFILE  := $(realpath $(lastword $(MAKEFILE_LIST)))
ROOT_DIR  := $(shell dirname $(MAKEFILE))
SOURCES   := $(wildcard *.go src/*.go src/*/*.go shell/*sh man/man1/*.1) $(MAKEFILE)

BASH_SCRIPTS := $(ROOT_DIR)/bin/fzf-preview.sh \
	$(ROOT_DIR)/bin/fzf-tmux \
	$(ROOT_DIR)/install \
	$(ROOT_DIR)/uninstall \
	$(ROOT_DIR)/shell/common.sh \
	$(ROOT_DIR)/shell/update.sh \
	$(ROOT_DIR)/shell/completion.bash \
	$(ROOT_DIR)/shell/key-bindings.bash

ifdef FZF_VERSION
VERSION  := $(FZF_VERSION)
else
VERSION  := $(shell git describe --abbrev=0 2> /dev/null | sed "s/^v//")
endif
ifeq ($(VERSION),)
$(error Not on git repository; cannot determine $$FZF_VERSION)
endif
VERSION_TRIM  := $(shell echo $(VERSION) | sed "s/^v//; s/-.*//")
VERSION_REGEX := $(subst .,\.,$(VERSION_TRIM))

ifdef FZF_REVISION
REVISION := $(FZF_REVISION)
else
REVISION := $(shell git log -n 1 --pretty=format:%h --abbrev=8 -- $(SOURCES) 2> /dev/null)
endif
ifeq ($(REVISION),)
$(error Not on git repository; cannot determine $$FZF_REVISION)
endif
BUILD_FLAGS := -a -ldflags "-s -w -X main.version=$(VERSION) -X main.revision=$(REVISION)" -tags "$(TAGS)" -trimpath

BINARY32      := fzf-$(GOOS)_386
BINARY64      := fzf-$(GOOS)_amd64
BINARYS390    := fzf-$(GOOS)_s390x
BINARYARM5    := fzf-$(GOOS)_arm5
BINARYARM6    := fzf-$(GOOS)_arm6
BINARYARM7    := fzf-$(GOOS)_arm7
BINARYARM8    := fzf-$(GOOS)_arm8
BINARYPPC64LE := fzf-$(GOOS)_ppc64le
BINARYRISCV64 := fzf-$(GOOS)_riscv64
BINARYLOONG64 := fzf-$(GOOS)_loong64

# https://en.wikipedia.org/wiki/Uname
UNAME_M := $(shell uname -m)
ifeq ($(UNAME_M),x86_64)
	BINARY := $(BINARY64)
else ifeq ($(UNAME_M),amd64)
	BINARY := $(BINARY64)
else ifeq ($(UNAME_M),s390x)
	BINARY := $(BINARYS390)
else ifeq ($(UNAME_M),i686)
	BINARY := $(BINARY32)
else ifeq ($(UNAME_M),i386)
	BINARY := $(BINARY32)
else ifeq ($(UNAME_M),armv5l)
	BINARY := $(BINARYARM5)
else ifeq ($(UNAME_M),armv6l)
	BINARY := $(BINARYARM6)
else ifeq ($(UNAME_M),armv7l)
	BINARY := $(BINARYARM7)
else ifeq ($(UNAME_M),armv8l)
	# armv8l is always 32-bit and should implement the armv7 ISA, so
	# just use the same filename as for armv7.
	BINARY := $(BINARYARM7)
else ifeq ($(UNAME_M),arm64)
	BINARY := $(BINARYARM8)
else ifeq ($(UNAME_M),aarch64)
	BINARY := $(BINARYARM8)
else ifeq ($(UNAME_M),ppc64le)
	BINARY := $(BINARYPPC64LE)
else ifeq ($(UNAME_M),riscv64)
	BINARY := $(BINARYRISCV64)
else ifeq ($(UNAME_M),loongarch64)
	BINARY := $(BINARYLOONG64)
else
$(error Build on $(UNAME_M) is not supported, yet.)
endif

all: target/$(BINARY)

test: $(SOURCES)
	SHELL=/bin/sh GOOS= $(GO) test -v -tags "$(TAGS)" \
		github.com/junegunn/fzf/src \
		github.com/junegunn/fzf/src/algo \
		github.com/junegunn/fzf/src/tui \
		github.com/junegunn/fzf/src/util

itest:
	ruby test/runner.rb

bench:
	cd src && SHELL=/bin/sh GOOS= $(GO) test -v -tags "$(TAGS)" -run=Bench -bench=. -benchmem

lint: $(SOURCES) test/*.rb test/lib/*.rb ${BASH_SCRIPTS}
	[ -z "$$(gofmt -s -d src)" ] || (gofmt -s -d src; exit 1)
	bundle exec rubocop -a --require rubocop-minitest --require rubocop-performance
	shell/update.sh --check ${BASH_SCRIPTS}

fmt: $(SOURCES) $(BASH_SCRIPTS)
	gofmt -s -w src
	shell/update.sh ${BASH_SCRIPTS}

install: bin/fzf

generate:
	PATH=$(PATH):$(GOPATH)/bin $(GO) generate ./...

build:
	goreleaser build --clean --snapshot --skip=post-hooks

release:
	# Make sure that the tests pass and the build works
	TAGS=tcell make test
	make test build clean

ifndef GITHUB_TOKEN
	$(error GITHUB_TOKEN is not defined)
endif

	# Check if we are on master branch
ifneq ($(shell git symbolic-ref --short HEAD),master)
	$(error Not on master branch)
endif

	# Check if version numbers are properly updated
	grep -q ^$(VERSION_REGEX)$$ CHANGELOG.md
	grep -qF '"fzf $(VERSION_TRIM)"' man/man1/fzf.1
	grep -qF '"fzf $(VERSION_TRIM)"' man/man1/fzf-tmux.1
	grep -qF $(VERSION) install
	grep -qF $(VERSION) install.ps1

	# Make release note out of CHANGELOG.md
	mkdir -p tmp
	sed -n '/^$(VERSION_REGEX)$$/,/^[0-9]/p' CHANGELOG.md | tail -r | \
		sed '1,/^ *$$/d' | tail -r | sed 1,2d | tee tmp/release-note

	# Push to temp branch first so that install scripts always works on master branch
	git checkout -B temp master
	git push origin temp --follow-tags --force

	# Make a GitHub release
	goreleaser --clean --release-notes tmp/release-note

	# Push to master
	git checkout master
	git push origin master

	# Delete temp branch
	git push origin --delete temp

clean:
	$(RM) -r dist target

target/$(BINARY32): $(SOURCES)
	GOARCH=386 $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARY64): $(SOURCES)
	GOARCH=amd64 $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARYS390): $(SOURCES)
	GOARCH=s390x $(GO) build $(BUILD_FLAGS) -o $@

# https://github.com/golang/go/wiki/GoArm
target/$(BINARYARM5): $(SOURCES)
	GOARCH=arm GOARM=5 $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARYARM6): $(SOURCES)
	GOARCH=arm GOARM=6 $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARYARM7): $(SOURCES)
	GOARCH=arm GOARM=7 $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARYARM8): $(SOURCES)
	GOARCH=arm64 $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARYPPC64LE): $(SOURCES)
	GOARCH=ppc64le $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARYRISCV64): $(SOURCES)
	GOARCH=riscv64 $(GO) build $(BUILD_FLAGS) -o $@

target/$(BINARYLOONG64): $(SOURCES)
	GOARCH=loong64 $(GO) build $(BUILD_FLAGS) -o $@

bin/fzf: target/$(BINARY) | bin
	-rm -f bin/fzf
	cp -f target/$(BINARY) bin/fzf

docker:
	docker build -t fzf-ubuntu .
	docker run -it fzf-ubuntu tmux

docker-test:
	docker build -t fzf-ubuntu .
	docker run -it fzf-ubuntu

update:
	$(GO) get -u
	$(GO) mod tidy

.PHONY: all generate build release test itest bench lint install clean docker docker-test update fmt
496  README-VIM.md  Normal file
@@ -0,0 +1,496 @@
|
|||||||
|
FZF Vim integration
|
||||||
|
===================
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
Once you have fzf installed, you can enable it inside Vim simply by adding the
|
||||||
|
directory to `&runtimepath` in your Vim configuration file. The path may
|
||||||
|
differ depending on the package manager.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" If installed using Homebrew
|
||||||
|
set rtp+=/usr/local/opt/fzf
|
||||||
|
|
||||||
|
" If installed using Homebrew on Apple Silicon
|
||||||
|
set rtp+=/opt/homebrew/opt/fzf
|
||||||
|
|
||||||
|
" If you have cloned fzf on ~/.fzf directory
|
||||||
|
set rtp+=~/.fzf
|
||||||
|
```
|
||||||
|
|
||||||
|
If you use [vim-plug](https://github.com/junegunn/vim-plug), the same can be
|
||||||
|
written as:
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" If installed using Homebrew
|
||||||
|
Plug '/usr/local/opt/fzf'
|
||||||
|
|
||||||
|
" If installed using Homebrew on Apple Silicon
|
||||||
|
Plug '/opt/homebrew/opt/fzf'
|
||||||
|
|
||||||
|
" If you have cloned fzf on ~/.fzf directory
|
||||||
|
Plug '~/.fzf'
|
||||||
|
```
|
||||||
|
|
||||||
|
But if you want the latest Vim plugin file from GitHub rather than the one
|
||||||
|
included in the package, write:
|
||||||
|
|
||||||
|
```vim
|
||||||
|
Plug 'junegunn/fzf'
|
||||||
|
```
|
||||||
|
|
||||||
|
The Vim plugin will pick up fzf binary available on the system. If fzf is not
|
||||||
|
found on `$PATH`, it will ask you if it should download the latest binary for
|
||||||
|
you.
|
||||||
|
|
||||||
|
To make sure that you have the latest version of the binary, set up
|
||||||
|
post-update hook like so:
|
||||||
|
|
||||||
|
```vim
|
||||||
|
Plug 'junegunn/fzf', { 'do': { -> fzf#install() } }
|
||||||
|
```
|
||||||
|
|
||||||
|
Summary
|
||||||
|
-------
|
||||||
|
|
||||||
|
The Vim plugin of fzf provides two core functions, and `:FZF` command which is
|
||||||
|
the basic file selector command built on top of them.
|
||||||
|
|
||||||
|
1. **`fzf#run([spec dict])`**
|
||||||
|
- Starts fzf inside Vim with the given spec
|
||||||
|
- `:call fzf#run({'source': 'ls'})`
|
||||||
|
2. **`fzf#wrap([spec dict]) -> (dict)`**
|
||||||
|
- Takes a spec for `fzf#run` and returns an extended version of it with
|
||||||
|
additional options for addressing global preferences (`g:fzf_xxx`)
|
||||||
|
- `:echo fzf#wrap({'source': 'ls'})`
|
||||||
|
- We usually *wrap* a spec with `fzf#wrap` before passing it to `fzf#run`
|
||||||
|
- `:call fzf#run(fzf#wrap({'source': 'ls'}))`
|
||||||
|
3. **`:FZF [fzf_options string] [path string]`**
|
||||||
|
- Basic fuzzy file selector
|
||||||
|
- A reference implementation for those who don't want to write VimScript
|
||||||
|
to implement custom commands
|
||||||
|
- If you're looking for more such commands, check out [fzf.vim](https://github.com/junegunn/fzf.vim) project.
|
||||||
|
|
||||||
|
The most important of all is `fzf#run`, but it would be easier to understand
the whole picture if we start off with the `:FZF` command.
|
||||||
|
|
||||||
|
`:FZF[!]`
|
||||||
|
---------
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" Look for files under current directory
|
||||||
|
:FZF
|
||||||
|
|
||||||
|
" Look for files under your home directory
|
||||||
|
:FZF ~
|
||||||
|
|
||||||
|
" With fzf command-line options
|
||||||
|
:FZF --reverse --info=inline /tmp
|
||||||
|
|
||||||
|
" Bang version starts fzf in fullscreen mode
|
||||||
|
:FZF!
|
||||||
|
```
|
||||||
|
|
||||||
|
Similarly to [ctrlp.vim](https://github.com/kien/ctrlp.vim), use the enter key,
`CTRL-T`, `CTRL-X`, or `CTRL-V` to open selected files in the current window,
in new tabs, in horizontal splits, or in vertical splits, respectively.
|
||||||
|
|
||||||
|
Note that the environment variables `FZF_DEFAULT_COMMAND` and
|
||||||
|
`FZF_DEFAULT_OPTS` also apply here.
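
For instance, both variables can be set from your Vim configuration before invoking `:FZF`. This is only an illustrative sketch; the `fd` command is an arbitrary choice and assumes you have it installed:

```vim
" Illustrative only: make :FZF list files with fd and start with a reversed layout
let $FZF_DEFAULT_COMMAND = 'fd --type f'
let $FZF_DEFAULT_OPTS = '--reverse'
```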
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
- `g:fzf_action`
|
||||||
|
- Customizable extra key bindings for opening selected files in different ways
|
||||||
|
- `g:fzf_layout`
|
||||||
|
- Determines the size and position of fzf window
|
||||||
|
- `g:fzf_colors`
|
||||||
|
- Customizes fzf colors to match the current color scheme
|
||||||
|
- `g:fzf_history_dir`
|
||||||
|
- Enables history feature
|
||||||
|
|
||||||
|
#### Examples
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" This is the default extra key bindings
|
||||||
|
let g:fzf_action = {
|
||||||
|
\ 'ctrl-t': 'tab split',
|
||||||
|
\ 'ctrl-x': 'split',
|
||||||
|
\ 'ctrl-v': 'vsplit' }
|
||||||
|
|
||||||
|
" An action can be a reference to a function that processes selected lines
|
||||||
|
function! s:build_quickfix_list(lines)
|
||||||
|
call setqflist(map(copy(a:lines), '{ "filename": v:val, "lnum": 1 }'))
|
||||||
|
copen
|
||||||
|
cc
|
||||||
|
endfunction
|
||||||
|
|
||||||
|
let g:fzf_action = {
|
||||||
|
\ 'ctrl-q': function('s:build_quickfix_list'),
|
||||||
|
\ 'ctrl-t': 'tab split',
|
||||||
|
\ 'ctrl-x': 'split',
|
||||||
|
\ 'ctrl-v': 'vsplit' }
|
||||||
|
|
||||||
|
" Default fzf layout
|
||||||
|
" - Popup window (center of the screen)
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||||
|
|
||||||
|
" - Popup window (center of the current window)
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true } }
|
||||||
|
|
||||||
|
" - Popup window (anchored to the bottom of the current window)
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true, 'yoffset': 1.0 } }
|
||||||
|
|
||||||
|
" - down / up / left / right
|
||||||
|
let g:fzf_layout = { 'down': '40%' }
|
||||||
|
|
||||||
|
" - Window using a Vim command
|
||||||
|
let g:fzf_layout = { 'window': 'enew' }
|
||||||
|
let g:fzf_layout = { 'window': '-tabnew' }
|
||||||
|
let g:fzf_layout = { 'window': '10new' }
|
||||||
|
|
||||||
|
" Customize fzf colors to match your color scheme
|
||||||
|
" - fzf#wrap translates this to a set of `--color` options
|
||||||
|
let g:fzf_colors =
|
||||||
|
\ { 'fg': ['fg', 'Normal'],
|
||||||
|
\ 'bg': ['bg', 'Normal'],
|
||||||
|
\ 'query': ['fg', 'Normal'],
|
||||||
|
\ 'hl': ['fg', 'Comment'],
|
||||||
|
\ 'fg+': ['fg', 'CursorLine', 'CursorColumn', 'Normal'],
|
||||||
|
\ 'bg+': ['bg', 'CursorLine', 'CursorColumn'],
|
||||||
|
\ 'hl+': ['fg', 'Statement'],
|
||||||
|
\ 'info': ['fg', 'PreProc'],
|
||||||
|
\ 'border': ['fg', 'Ignore'],
|
||||||
|
\ 'prompt': ['fg', 'Conditional'],
|
||||||
|
\ 'pointer': ['fg', 'Exception'],
|
||||||
|
\ 'marker': ['fg', 'Keyword'],
|
||||||
|
\ 'spinner': ['fg', 'Label'],
|
||||||
|
\ 'header': ['fg', 'Comment'] }
|
||||||
|
|
||||||
|
" Enable per-command history
|
||||||
|
" - History files will be stored in the specified directory
|
||||||
|
" - When set, CTRL-N and CTRL-P will be bound to 'next-history' and
|
||||||
|
" 'previous-history' instead of 'down' and 'up'.
|
||||||
|
let g:fzf_history_dir = '~/.local/share/fzf-history'
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Explanation of `g:fzf_colors`
|
||||||
|
|
||||||
|
`g:fzf_colors` is a dictionary mapping fzf elements to a color specification
|
||||||
|
list:
|
||||||
|
|
||||||
|
element: [ component, group1 [, group2, ...] ]
|
||||||
|
|
||||||
|
- `element` is an fzf element to apply a color to:
|
||||||
|
|
||||||
|
| Element | Description |
|
||||||
|
| --- | --- |
|
||||||
|
| `fg` / `bg` / `hl` | Item (foreground / background / highlight) |
|
||||||
|
| `fg+` / `bg+` / `hl+` | Current item (foreground / background / highlight) |
|
||||||
|
| `preview-fg` / `preview-bg` | Preview window text and background |
|
||||||
|
| `hl` / `hl+` | Highlighted substrings (normal / current) |
|
||||||
|
| `gutter` | Background of the gutter on the left |
|
||||||
|
| `pointer` | Pointer to the current line (`>`) |
|
||||||
|
| `marker` | Multi-select marker (`>`) |
|
||||||
|
| `border` | Border around the window (`--border` and `--preview`) |
|
||||||
|
| `header` | Header (`--header` or `--header-lines`) |
|
||||||
|
| `info` | Info line (match counters) |
|
||||||
|
| `spinner` | Streaming input indicator |
|
||||||
|
| `query` | Query string |
|
||||||
|
| `disabled` | Query string when search is disabled |
|
||||||
|
| `prompt` | Prompt before query (`> `) |
|
||||||
|
|
||||||
|
|
||||||
|
- `component` specifies the component (`fg` / `bg`) from which to extract the
|
||||||
|
color when considering each of the following highlight groups
|
||||||
|
|
||||||
|
- `group1 [, group2, ...]` is a list of highlight groups that are searched (in
|
||||||
|
order) for a matching color definition
|
||||||
|
|
||||||
|
For example, consider the following specification:
|
||||||
|
|
||||||
|
```vim
|
||||||
|
'prompt': ['fg', 'Conditional', 'Comment'],
|
||||||
|
```
|
||||||
|
|
||||||
|
This means we color the **prompt**
|
||||||
|
- using the `fg` attribute of the `Conditional` if it exists,
|
||||||
|
- otherwise use the `fg` attribute of the `Comment` highlight group if it exists,
|
||||||
|
- otherwise fall back to the default color settings for the **prompt**.
|
||||||
|
|
||||||
|
You can examine the `--color` options generated according to this setting by printing
the result of the `fzf#wrap()` function like so:
|
||||||
|
|
||||||
|
```vim
|
||||||
|
:echo fzf#wrap()
|
||||||
|
```
|
||||||
|
|
||||||
|
`fzf#run`
|
||||||
|
---------
|
||||||
|
|
||||||
|
The `fzf#run()` function is the core of the Vim integration. It takes a single
dictionary argument, *a spec*, and starts an fzf process accordingly. At the very
least, specify the `sink` option to tell fzf what it should do with the selected
entry.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
call fzf#run({'sink': 'e'})
|
||||||
|
```
|
||||||
|
|
||||||
|
We haven't specified the `source`, so this is equivalent to starting fzf on the
command line without a standard input pipe; fzf will traverse the file system
under the current directory to get the list of files. (If
`$FZF_DEFAULT_COMMAND` is set, fzf will use the output of that command
instead.) When you select an entry, it is opened with the sink, the `:e`
command. If you want to open it in a new tab, you can pass the `:tabedit`
command as the sink instead.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
call fzf#run({'sink': 'tabedit'})
|
||||||
|
```
|
||||||
|
|
||||||
|
You can use any shell command as the source to generate the list. The
following example lists the files managed by git. It's equivalent to
running `git ls-files | fzf` in a shell.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
call fzf#run({'source': 'git ls-files', 'sink': 'e'})
|
||||||
|
```
|
||||||
|
|
||||||
|
fzf options can be specified as the `options` entry in the spec dictionary.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
call fzf#run({'sink': 'tabedit', 'options': '--multi --reverse'})
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also pass a layout option if you don't want the fzf window to take up
the entire screen.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" up / down / left / right / window are allowed
|
||||||
|
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'left': '40%'})
|
||||||
|
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'window': '30vnew'})
|
||||||
|
```
|
||||||
|
|
||||||
|
`source` doesn't have to be an external shell command; you can also pass a Vim
list as the source. In the next example, we pass the names of color
schemes as the source to implement a color scheme selector.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
call fzf#run({'source': map(split(globpath(&rtp, 'colors/*.vim')),
|
||||||
|
\ 'fnamemodify(v:val, ":t:r")'),
|
||||||
|
\ 'sink': 'colo', 'left': '25%'})
|
||||||
|
```
|
||||||
|
|
||||||
|
The following table summarizes the available options; a short example of the funcref entries follows the table.
|
||||||
|
|
||||||
|
| Option name | Type | Description |
|
||||||
|
| -------------------------- | ------------- | ---------------------------------------------------------------- |
|
||||||
|
| `source` | string | External command to generate input to fzf (e.g. `find .`) |
|
||||||
|
| `source` | list | Vim list as input to fzf |
|
||||||
|
| `sink` | string | Vim command to handle the selected item (e.g. `e`, `tabe`) |
|
||||||
|
| `sink` | funcref | Function to be called with each selected item |
|
||||||
|
| `sinklist` (or `sink*`) | funcref | Similar to `sink`, but takes the list of output lines at once |
|
||||||
|
| `exit` | funcref | Function to be called with the exit status of fzf (e.g. 0, 1, 2, 130) |
|
||||||
|
| `options` | string/list | Options to fzf |
|
||||||
|
| `dir` | string | Working directory |
|
||||||
|
| `up`/`down`/`left`/`right` | number/string | (Layout) Window position and size (e.g. `20`, `50%`) |
|
||||||
|
| `tmux` | string | (Layout) `--tmux` options (e.g. `90%,70%`) |
|
||||||
|
| `window` (Vim 8 / Neovim) | string | (Layout) Command to open fzf window (e.g. `vertical aboveleft 30new`) |
|
||||||
|
| `window` (Vim 8 / Neovim) | dict | (Layout) Popup window settings (e.g. `{'width': 0.9, 'height': 0.6}`) |
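
The following is a minimal sketch of the `sinklist` entry from the table above; the function name and the message it prints are illustrative only, and `--multi` is enabled so that more than one line can be selected:

```vim
" Illustrative only: print the number of selected entries and their names
function! s:report_selection(lines)
  echo len(a:lines) . ' item(s) selected: ' . join(a:lines, ', ')
endfunction

call fzf#run(fzf#wrap({
      \ 'source': 'git ls-files',
      \ 'sinklist': function('s:report_selection'),
      \ 'options': ['--multi']}))
```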
|
||||||
|
|
||||||
|
The `options` entry can be either a string or a list. For simple cases, a string
should suffice, but prefer the list type to avoid escaping issues.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
call fzf#run({'options': '--reverse --prompt "C:\\Program Files\\"'})
|
||||||
|
call fzf#run({'options': ['--reverse', '--prompt', 'C:\Program Files\']})
|
||||||
|
```
|
||||||
|
|
||||||
|
When the `window` entry is a dictionary, fzf will start in a popup window. The
following options are allowed (a combined example follows the list):
|
||||||
|
|
||||||
|
- Required:
|
||||||
|
- `width` [float range [0 ~ 1]] or [integer range [8 ~ ]]
|
||||||
|
- `height` [float range [0 ~ 1]] or [integer range [4 ~ ]]
|
||||||
|
- Optional:
|
||||||
|
- `yoffset` [float default 0.5 range [0 ~ 1]]
|
||||||
|
- `xoffset` [float default 0.5 range [0 ~ 1]]
|
||||||
|
- `relative` [boolean default v:false]
|
||||||
|
- `border` [string default `rounded` (`sharp` on Windows)]: Border style
|
||||||
|
- `rounded` / `sharp` / `horizontal` / `vertical` / `top` / `bottom` / `left` / `right` / `no[ne]`
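
A minimal sketch combining the required keys with a few of the optional ones; the particular values are arbitrary:

```vim
" Popup anchored near the bottom of the current window with a sharp border
let g:fzf_layout = { 'window': {
      \ 'width': 0.9, 'height': 0.6,
      \ 'relative': v:true, 'yoffset': 1.0, 'border': 'sharp' } }
```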
|
||||||
|
|
||||||
|
`fzf#wrap`
|
||||||
|
----------
|
||||||
|
|
||||||
|
We have seen that several aspects of the `:FZF` command can be configured with
|
||||||
|
a set of global option variables; different ways to open files
|
||||||
|
(`g:fzf_action`), window position and size (`g:fzf_layout`), color palette
|
||||||
|
(`g:fzf_colors`), etc.
|
||||||
|
|
||||||
|
So how can we make our custom `fzf#run` calls also respect those variables?
|
||||||
|
Simply by *"wrapping"* the spec dictionary with `fzf#wrap` before passing it
|
||||||
|
to `fzf#run`.
|
||||||
|
|
||||||
|
- **`fzf#wrap([name string], [spec dict], [fullscreen bool]) -> (dict)`**
|
||||||
|
- All arguments are optional. Usually we only need to pass a spec dictionary.
|
||||||
|
- `name` is for managing history files. It is ignored if
|
||||||
|
`g:fzf_history_dir` is not defined.
|
||||||
|
- `fullscreen` can be either `0` or `1` (default: 0).
|
||||||
|
|
||||||
|
`fzf#wrap` takes a spec and returns an extended version of it (also
|
||||||
|
a dictionary) with additional options for addressing global preferences. You
|
||||||
|
can examine the return value of it like so:
|
||||||
|
|
||||||
|
```vim
|
||||||
|
echo fzf#wrap({'source': 'ls'})
|
||||||
|
```
|
||||||
|
|
||||||
|
After we *"wrap"* our spec, we pass it to `fzf#run`.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||||
|
```
|
||||||
|
|
||||||
|
Now it supports `CTRL-T`, `CTRL-V`, and `CTRL-X` key bindings (configurable
|
||||||
|
via `g:fzf_action`) and it opens fzf window according to `g:fzf_layout`
|
||||||
|
setting.
|
||||||
|
|
||||||
|
To make it easier to use, let's define `LS` command.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
command! LS call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||||
|
```
|
||||||
|
|
||||||
|
Type `:LS` and see how it works.
|
||||||
|
|
||||||
|
We would like to make `:LS!` (bang version) open fzf in fullscreen, just like
|
||||||
|
`:FZF!`. Add `-bang` to the command definition, and use the `<bang>` value to set
the last `fullscreen` argument of `fzf#wrap` (see `:help <bang>`).
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" On :LS!, <bang> evaluates to '!', and '!0' becomes 1
|
||||||
|
command! -bang LS call fzf#run(fzf#wrap({'source': 'ls'}, <bang>0))
|
||||||
|
```
|
||||||
|
|
||||||
|
Our `:LS` command will be much more useful if we can pass a directory argument
|
||||||
|
to it, so that something like `:LS /tmp` is possible.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
command! -bang -complete=dir -nargs=? LS
|
||||||
|
\ call fzf#run(fzf#wrap({'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||||
|
```
|
||||||
|
|
||||||
|
Lastly, if you have enabled `g:fzf_history_dir`, you might want to assign
|
||||||
|
a unique name to our command and pass it as the first argument to `fzf#wrap`.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" The query history for this command will be stored as 'ls' inside g:fzf_history_dir.
|
||||||
|
" The name is ignored if g:fzf_history_dir is not defined.
|
||||||
|
command! -bang -complete=dir -nargs=? LS
|
||||||
|
\ call fzf#run(fzf#wrap('ls', {'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||||
|
```
|
||||||
|
|
||||||
|
### Global options supported by `fzf#wrap`
|
||||||
|
|
||||||
|
- `g:fzf_layout`
|
||||||
|
- `g:fzf_action`
|
||||||
|
- **Works only when no custom `sink` (or `sinklist`) is provided**
|
||||||
|
- Having a custom sink usually means that each entry is not an ordinary
file path (e.g. the name of a color scheme), so we can't blindly apply the
same strategy (i.e. `tabedit some-color-scheme` doesn't make sense)
|
||||||
|
- `g:fzf_colors`
|
||||||
|
- `g:fzf_history_dir`
|
||||||
|
|
||||||
|
Tips
|
||||||
|
----
|
||||||
|
|
||||||
|
### fzf inside terminal buffer
|
||||||
|
|
||||||
|
On the latest versions of Vim and Neovim, fzf will start in a terminal buffer.
|
||||||
|
If you find the default ANSI colors to be different, consider configuring the
|
||||||
|
colors using `g:terminal_ansi_colors` in regular Vim or `g:terminal_color_x`
|
||||||
|
in Neovim.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" Terminal colors for seoul256 color scheme
|
||||||
|
if has('nvim')
|
||||||
|
let g:terminal_color_0 = '#4e4e4e'
|
||||||
|
let g:terminal_color_1 = '#d68787'
|
||||||
|
let g:terminal_color_2 = '#5f865f'
|
||||||
|
let g:terminal_color_3 = '#d8af5f'
|
||||||
|
let g:terminal_color_4 = '#85add4'
|
||||||
|
let g:terminal_color_5 = '#d7afaf'
|
||||||
|
let g:terminal_color_6 = '#87afaf'
|
||||||
|
let g:terminal_color_7 = '#d0d0d0'
|
||||||
|
let g:terminal_color_8 = '#626262'
|
||||||
|
let g:terminal_color_9 = '#d75f87'
|
||||||
|
let g:terminal_color_10 = '#87af87'
|
||||||
|
let g:terminal_color_11 = '#ffd787'
|
||||||
|
let g:terminal_color_12 = '#add4fb'
|
||||||
|
let g:terminal_color_13 = '#ffafaf'
|
||||||
|
let g:terminal_color_14 = '#87d7d7'
|
||||||
|
let g:terminal_color_15 = '#e4e4e4'
|
||||||
|
else
|
||||||
|
let g:terminal_ansi_colors = [
|
||||||
|
\ '#4e4e4e', '#d68787', '#5f865f', '#d8af5f',
|
||||||
|
\ '#85add4', '#d7afaf', '#87afaf', '#d0d0d0',
|
||||||
|
\ '#626262', '#d75f87', '#87af87', '#ffd787',
|
||||||
|
\ '#add4fb', '#ffafaf', '#87d7d7', '#e4e4e4'
|
||||||
|
\ ]
|
||||||
|
endif
|
||||||
|
```
|
||||||
|
|
||||||
|
### Starting fzf in a popup window
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" Required:
|
||||||
|
" - width [float range [0 ~ 1]] or [integer range [8 ~ ]]
|
||||||
|
" - height [float range [0 ~ 1]] or [integer range [4 ~ ]]
|
||||||
|
"
|
||||||
|
" Optional:
|
||||||
|
" - xoffset [float default 0.5 range [0 ~ 1]]
|
||||||
|
" - yoffset [float default 0.5 range [0 ~ 1]]
|
||||||
|
" - relative [boolean default v:false]
|
||||||
|
" - border [string default 'rounded']: Border style
|
||||||
|
" - 'rounded' / 'sharp' / 'horizontal' / 'vertical' / 'top' / 'bottom' / 'left' / 'right'
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||||
|
```
|
||||||
|
|
||||||
|
Alternatively, you can make fzf open in a tmux popup window (requires tmux 3.2
|
||||||
|
or above) by putting the `--tmux` option value in the `tmux` key.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
" See `--tmux` option in `man fzf` for available options
|
||||||
|
" [center|top|bottom|left|right][,SIZE[%]][,SIZE[%]]
|
||||||
|
if exists('$TMUX')
|
||||||
|
let g:fzf_layout = { 'tmux': '90%,70%' }
|
||||||
|
else
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||||
|
endif
|
||||||
|
```
|
||||||
|
|
||||||
|
### Hide statusline
|
||||||
|
|
||||||
|
When fzf starts in a terminal buffer, the file type of the buffer is set to
|
||||||
|
`fzf`. So you can set up a `FileType fzf` autocmd to customize the settings of
|
||||||
|
the window.
|
||||||
|
|
||||||
|
For example, if you open fzf at the bottom of the screen (e.g. `{'down':
'40%'}`), you might want to temporarily disable the statusline for a cleaner
|
||||||
|
look.
|
||||||
|
|
||||||
|
```vim
|
||||||
|
let g:fzf_layout = { 'down': '30%' }
|
||||||
|
autocmd! FileType fzf
|
||||||
|
autocmd FileType fzf set laststatus=0 noshowmode noruler
|
||||||
|
\| autocmd BufLeave <buffer> set laststatus=2 showmode ruler
|
||||||
|
```
|
||||||
|
|
||||||
|
[License](LICENSE)
|
||||||
|
------------------
|
||||||
|
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2013-2025 Junegunn Choi
|
||||||
SECURITY.md (new file, 33 lines)
@@ -0,0 +1,33 @@
# Security Reporting

If you wish to report a security vulnerability privately, we appreciate your diligence. Please follow the guidelines below to submit your report.

## Reporting

To report a security vulnerability, please provide the following information:

1. **PROJECT**
   - https://github.com/junegunn/fzf

2. **PUBLIC**
   - Indicate whether this vulnerability has already been publicly discussed or disclosed.
   - If so, provide relevant links.

3. **DESCRIPTION**
   - Provide a detailed description of the security vulnerability.
   - Include as much information as possible to help us understand and address the issue.

Send this information, along with any additional relevant details, to <junegunn.c AT gmail DOT com>.

## Confidentiality

We kindly ask you to keep the report confidential until a public announcement is made.

## Notes

- Vulnerabilities will be handled on a best-effort basis.
- You may request an advance copy of the patched release, but we cannot guarantee early access before the public release.
- You will be notified via email simultaneously with the public announcement.
- We will respond within a few weeks to confirm whether your report has been accepted or rejected.

Thank you for helping to improve the security of our project!
bin/fzf-preview.sh (new executable file, 86 lines)
@@ -0,0 +1,86 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
#
|
||||||
|
# The purpose of this script is to demonstrate how to preview a file or an
|
||||||
|
# image in the preview window of fzf.
|
||||||
|
#
|
||||||
|
# Dependencies:
|
||||||
|
# - https://github.com/sharkdp/bat
|
||||||
|
# - https://github.com/hpjansson/chafa
|
||||||
|
# - https://iterm2.com/utilities/imgcat
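#
# Example usage (illustrative, not part of the script itself): wire it into
# fzf's preview window like so:
#   fzf --preview 'bash bin/fzf-preview.sh {}'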
|
||||||
|
|
||||||
|
if [[ $# -ne 1 ]]; then
|
||||||
|
>&2 echo "usage: $0 FILENAME[:LINENO][:IGNORED]"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
file=${1/#\~\//$HOME/}
|
||||||
|
|
||||||
|
center=0
|
||||||
|
if [[ ! -r $file ]]; then
|
||||||
|
if [[ $file =~ ^(.+):([0-9]+)\ *$ ]] && [[ -r ${BASH_REMATCH[1]} ]]; then
|
||||||
|
file=${BASH_REMATCH[1]}
|
||||||
|
center=${BASH_REMATCH[2]}
|
||||||
|
elif [[ $file =~ ^(.+):([0-9]+):[0-9]+\ *$ ]] && [[ -r ${BASH_REMATCH[1]} ]]; then
|
||||||
|
file=${BASH_REMATCH[1]}
|
||||||
|
center=${BASH_REMATCH[2]}
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
type=$(file --brief --dereference --mime -- "$file")
|
||||||
|
|
||||||
|
if [[ ! $type =~ image/ ]]; then
|
||||||
|
if [[ $type =~ =binary ]]; then
|
||||||
|
file "$1"
|
||||||
|
exit
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Sometimes bat is installed as batcat.
|
||||||
|
if command -v batcat > /dev/null; then
|
||||||
|
batname="batcat"
|
||||||
|
elif command -v bat > /dev/null; then
|
||||||
|
batname="bat"
|
||||||
|
else
|
||||||
|
cat "$1"
|
||||||
|
exit
|
||||||
|
fi
|
||||||
|
|
||||||
|
${batname} --style="${BAT_STYLE:-numbers}" --color=always --pager=never --highlight-line="${center:-0}" -- "$file"
|
||||||
|
exit
|
||||||
|
fi
|
||||||
|
|
||||||
|
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||||
|
if [[ $dim == x ]]; then
|
||||||
|
dim=$(stty size < /dev/tty | awk '{print $2 "x" $1}')
|
||||||
|
elif ! [[ $KITTY_WINDOW_ID ]] && ((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size < /dev/tty | awk '{print $1}'))); then
|
||||||
|
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
|
||||||
|
# * https://github.com/junegunn/fzf/issues/2544
|
||||||
|
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 1. Use icat (from Kitty) if kitten is installed
|
||||||
|
if [[ $KITTY_WINDOW_ID ]] || [[ $GHOSTTY_RESOURCES_DIR ]] && command -v kitten > /dev/null; then
|
||||||
|
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
|
||||||
|
# you have to use 'stream'.
|
||||||
|
#
|
||||||
|
# 2. The last line of the output is the ANSI reset code without newline.
|
||||||
|
# This confuses fzf and makes it render scroll offset indicator.
|
||||||
|
# So we remove the last line and append the reset code to its previous line.
|
||||||
|
kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
|
||||||
|
|
||||||
|
# 2. Use chafa with Sixel output
|
||||||
|
elif command -v chafa > /dev/null; then
|
||||||
|
chafa -s "$dim" "$file"
|
||||||
|
# Add a new line character so that fzf can display multiple images in the preview window
|
||||||
|
echo
|
||||||
|
|
||||||
|
# 3. If chafa is not found but imgcat is available, use it on iTerm2
|
||||||
|
elif command -v imgcat > /dev/null; then
|
||||||
|
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
|
||||||
|
# user is running iTerm2. But for the sake of simplicity, we just assume
|
||||||
|
# that's the case here.
|
||||||
|
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
|
||||||
|
|
||||||
|
# 4. Cannot find any suitable method to preview the image
|
||||||
|
else
|
||||||
|
file "$file"
|
||||||
|
fi
|
||||||
bin/fzf-tmux (202 lines)
@@ -1,14 +1,14 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
# fzf-tmux: starts fzf in a tmux pane
|
# fzf-tmux: starts fzf in a tmux pane
|
||||||
# usage: fzf-tmux [-u|-d [HEIGHT[%]]] [-l|-r [WIDTH[%]]] [--] [FZF OPTIONS]
|
# usage: fzf-tmux [LAYOUT OPTIONS] [--] [FZF OPTIONS]
|
||||||
|
|
||||||
fail() {
|
fail() {
|
||||||
>&2 echo "$1"
|
>&2 echo "$1"
|
||||||
exit 2
|
exit 2
|
||||||
}
|
}
|
||||||
|
|
||||||
fzf="$(command -v fzf 2> /dev/null)" || fzf="$(dirname "$0")/fzf"
|
fzf="$(command which fzf)" || fzf="$(dirname "$0")/fzf"
|
||||||
[[ -x "$fzf" ]] || fail 'fzf executable not found'
|
[[ -x $fzf ]] || fail 'fzf executable not found'
|
||||||
|
|
||||||
args=()
|
args=()
|
||||||
opt=""
|
opt=""
|
||||||
@@ -16,19 +16,30 @@ skip=""
|
|||||||
swap=""
|
swap=""
|
||||||
close=""
|
close=""
|
||||||
term=""
|
term=""
|
||||||
[[ -n "$LINES" ]] && lines=$LINES || lines=$(tput lines)
|
[[ -n $LINES ]] && lines=$LINES || lines=$(tput lines) || lines=$(tmux display-message -p "#{pane_height}")
|
||||||
[[ -n "$COLUMNS" ]] && columns=$COLUMNS || columns=$(tput cols)
|
[[ -n $COLUMNS ]] && columns=$COLUMNS || columns=$(tput cols) || columns=$(tmux display-message -p "#{pane_width}")
|
||||||
|
|
||||||
|
tmux_version=$(tmux -V | sed 's/[^0-9.]//g')
|
||||||
|
tmux_32=$(awk '{print ($1 >= 3.2)}' <<< "$tmux_version" 2> /dev/null || bc -l <<< "$tmux_version >= 3.2")
|
||||||
|
|
||||||
help() {
|
help() {
|
||||||
>&2 echo 'usage: fzf-tmux [-u|-d [HEIGHT[%]]] [-l|-r [WIDTH[%]]] [--] [FZF OPTIONS]
|
>&2 echo 'usage: fzf-tmux [LAYOUT OPTIONS] [--] [FZF OPTIONS]
|
||||||
|
|
||||||
Layout
|
LAYOUT OPTIONS:
|
||||||
-u [HEIGHT[%]] Split above (up)
|
(default layout: -d 50%)
|
||||||
-d [HEIGHT[%]] Split below (down)
|
|
||||||
-l [WIDTH[%]] Split left
|
|
||||||
-r [WIDTH[%]] Split right
|
|
||||||
|
|
||||||
(default: -d 50%)
|
Popup window (requires tmux 3.2 or above):
|
||||||
|
-p [WIDTH[%][,HEIGHT[%]]] (default: 50%)
|
||||||
|
-w WIDTH[%]
|
||||||
|
-h HEIGHT[%]
|
||||||
|
-x COL
|
||||||
|
-y ROW
|
||||||
|
|
||||||
|
Split pane:
|
||||||
|
-u [HEIGHT[%]] Split above (up)
|
||||||
|
-d [HEIGHT[%]] Split below (down)
|
||||||
|
-l [WIDTH[%]] Split left
|
||||||
|
-r [WIDTH[%]] Split right
|
||||||
'
|
'
|
||||||
exit
|
exit
|
||||||
}
|
}
|
||||||
@@ -36,7 +47,7 @@ help() {
|
|||||||
while [[ $# -gt 0 ]]; do
|
while [[ $# -gt 0 ]]; do
|
||||||
arg="$1"
|
arg="$1"
|
||||||
shift
|
shift
|
||||||
[[ -z "$skip" ]] && case "$arg" in
|
[[ -z $skip ]] && case "$arg" in
|
||||||
-)
|
-)
|
||||||
term=1
|
term=1
|
||||||
;;
|
;;
|
||||||
@@ -47,17 +58,19 @@ while [[ $# -gt 0 ]]; do
|
|||||||
echo "fzf-tmux (with fzf $("$fzf" --version))"
|
echo "fzf-tmux (with fzf $("$fzf" --version))"
|
||||||
exit
|
exit
|
||||||
;;
|
;;
|
||||||
-w*|-h*|-d*|-u*|-r*|-l*)
|
-p* | -w* | -h* | -x* | -y* | -d* | -u* | -r* | -l*)
|
||||||
if [[ "$arg" =~ ^.[lrw] ]]; then
|
if [[ $arg =~ ^-[pwhxy] ]]; then
|
||||||
|
[[ $opt =~ "-E" ]] || opt="-E"
|
||||||
|
elif [[ $arg =~ ^.[lr] ]]; then
|
||||||
opt="-h"
|
opt="-h"
|
||||||
if [[ "$arg" =~ ^.l ]]; then
|
if [[ $arg =~ ^.l ]]; then
|
||||||
opt="$opt -d"
|
opt="$opt -d"
|
||||||
swap="; swap-pane -D ; select-pane -L"
|
swap="; swap-pane -D ; select-pane -L"
|
||||||
close="; tmux swap-pane -D"
|
close="; tmux swap-pane -D"
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
opt=""
|
opt=""
|
||||||
if [[ "$arg" =~ ^.u ]]; then
|
if [[ $arg =~ ^.u ]]; then
|
||||||
opt="$opt -d"
|
opt="$opt -d"
|
||||||
swap="; swap-pane -D ; select-pane -U"
|
swap="; swap-pane -D ; select-pane -U"
|
||||||
close="; tmux swap-pane -D"
|
close="; tmux swap-pane -D"
|
||||||
@@ -66,7 +79,7 @@ while [[ $# -gt 0 ]]; do
|
|||||||
if [[ ${#arg} -gt 2 ]]; then
|
if [[ ${#arg} -gt 2 ]]; then
|
||||||
size="${arg:2}"
|
size="${arg:2}"
|
||||||
else
|
else
|
||||||
if [[ "$1" =~ ^[0-9]+%?$ ]]; then
|
if [[ $1 =~ ^[0-9%,]+$ ]] || [[ $1 =~ ^[A-Z]$ ]]; then
|
||||||
size="$1"
|
size="$1"
|
||||||
shift
|
shift
|
||||||
else
|
else
|
||||||
@@ -74,21 +87,37 @@ while [[ $# -gt 0 ]]; do
|
|||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ "$size" =~ %$ ]]; then
|
if [[ $arg =~ ^-p ]]; then
|
||||||
size=${size:0:((${#size}-1))}
|
if [[ -n $size ]]; then
|
||||||
if [[ -n "$swap" ]]; then
|
w=${size%%,*}
|
||||||
opt="$opt -p $(( 100 - size ))"
|
h=${size##*,}
|
||||||
|
opt="$opt -w$w -h$h"
|
||||||
|
fi
|
||||||
|
elif [[ $arg =~ ^-[whxy] ]]; then
|
||||||
|
opt="$opt ${arg:0:2}$size"
|
||||||
|
elif [[ $size =~ %$ ]]; then
|
||||||
|
size=${size:0:${#size}-1}
|
||||||
|
if [[ $tmux_32 == 1 ]]; then
|
||||||
|
if [[ -n $swap ]]; then
|
||||||
|
opt="$opt -l $((100 - size))%"
|
||||||
|
else
|
||||||
|
opt="$opt -l $size%"
|
||||||
|
fi
|
||||||
else
|
else
|
||||||
opt="$opt -p $size"
|
if [[ -n $swap ]]; then
|
||||||
|
opt="$opt -p $((100 - size))"
|
||||||
|
else
|
||||||
|
opt="$opt -p $size"
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
if [[ -n "$swap" ]]; then
|
if [[ -n $swap ]]; then
|
||||||
if [[ "$arg" =~ ^.l ]]; then
|
if [[ $arg =~ ^.l ]]; then
|
||||||
max=$columns
|
max=$columns
|
||||||
else
|
else
|
||||||
max=$lines
|
max=$lines
|
||||||
fi
|
fi
|
||||||
size=$(( max - size ))
|
size=$((max - size))
|
||||||
[[ $size -lt 0 ]] && size=0
|
[[ $size -lt 0 ]] && size=0
|
||||||
opt="$opt -l $size"
|
opt="$opt -l $size"
|
||||||
else
|
else
|
||||||
@@ -106,22 +135,24 @@ while [[ $# -gt 0 ]]; do
|
|||||||
args+=("$arg")
|
args+=("$arg")
|
||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
[[ -n "$skip" ]] && args+=("$arg")
|
[[ -n $skip ]] && args+=("$arg")
|
||||||
done
|
done
|
||||||
|
|
||||||
if [[ -z "$TMUX" || "$opt" =~ ^-h && "$columns" -le 40 || ! "$opt" =~ ^-h && "$lines" -le 15 ]]; then
|
if [[ -z $TMUX ]]; then
|
||||||
"$fzf" "${args[@]}"
|
"$fzf" "${args[@]}"
|
||||||
exit $?
|
exit $?
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# --height option is not allowed
|
# * --height option is not allowed
|
||||||
args+=("--no-height")
|
# * CTRL-Z is also disabled
|
||||||
|
# * fzf-tmux script is not compatible with --tmux option in fzf 0.53.0 or later
|
||||||
|
args=("${args[@]}" "--no-height" "--bind=ctrl-z:ignore" "--no-tmux")
|
||||||
|
|
||||||
# Handle zoomed tmux pane by moving it to a temp window
|
# Handle zoomed tmux pane without popup options by moving it to a temp window
|
||||||
if tmux list-panes -F '#F' | grep -q Z; then
|
if [[ ! $opt =~ "-E" ]] && tmux list-panes -F '#F' | grep -q Z; then
|
||||||
zoomed=1
|
zoomed_without_popup=1
|
||||||
original_window=$(tmux display-message -p "#{window_id}")
|
original_window=$(tmux display-message -p "#{window_id}")
|
||||||
tmp_window=$(tmux new-window -d -P -F "#{window_id}" "bash -c 'while :; do for c in \\| / - \\\\; do sleep 0.2; printf \"\\r\$c fzf-tmux is running\\r\"; done; done'")
|
tmp_window=$(tmux new-window -d -P -F "#{window_id}" "bash -c 'while :; do for c in \\| / - '\\;' do sleep 0.2; printf \"\\r\$c fzf-tmux is running\\r\"; done; done'")
|
||||||
tmux swap-pane -t $tmp_window \; select-window -t $tmp_window
|
tmux swap-pane -t $tmp_window \; select-window -t $tmp_window
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -133,51 +164,94 @@ argsf="${TMPDIR:-/tmp}/fzf-args-$id"
|
|||||||
fifo1="${TMPDIR:-/tmp}/fzf-fifo1-$id"
|
fifo1="${TMPDIR:-/tmp}/fzf-fifo1-$id"
|
||||||
fifo2="${TMPDIR:-/tmp}/fzf-fifo2-$id"
|
fifo2="${TMPDIR:-/tmp}/fzf-fifo2-$id"
|
||||||
fifo3="${TMPDIR:-/tmp}/fzf-fifo3-$id"
|
fifo3="${TMPDIR:-/tmp}/fzf-fifo3-$id"
|
||||||
|
if tmux_win_opts=$(tmux show-options -p remain-on-exit \; show-options -p synchronize-panes 2> /dev/null); then
|
||||||
|
tmux_win_opts=($(sed '/ off/d; s/synchronize-panes/set-option -p synchronize-panes/; s/remain-on-exit/set-option -p remain-on-exit/; s/$/ \\;/' <<< "$tmux_win_opts"))
|
||||||
|
tmux_off_opts='; set-option -p synchronize-panes off ; set-option -p remain-on-exit off'
|
||||||
|
else
|
||||||
|
tmux_win_opts=($(tmux show-window-options remain-on-exit \; show-window-options synchronize-panes | sed '/ off/d; s/^/set-window-option /; s/$/ \\;/'))
|
||||||
|
tmux_off_opts='; set-window-option synchronize-panes off ; set-window-option remain-on-exit off'
|
||||||
|
fi
|
||||||
cleanup() {
|
cleanup() {
|
||||||
rm -f $argsf $fifo1 $fifo2 $fifo3
|
\rm -f $argsf $fifo1 $fifo2 $fifo3
|
||||||
|
|
||||||
# Remove temp window if we were zoomed
|
# Restore tmux window options
|
||||||
if [[ -n "$zoomed" ]]; then
|
if [[ ${#tmux_win_opts[@]} -gt 1 ]]; then
|
||||||
|
eval "tmux ${tmux_win_opts[*]}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Remove temp window if we were zoomed without popup options
|
||||||
|
if [[ -n $zoomed_without_popup ]]; then
|
||||||
|
tmux display-message -p "#{window_id}" > /dev/null
|
||||||
tmux swap-pane -t $original_window \; \
|
tmux swap-pane -t $original_window \; \
|
||||||
select-window -t $original_window \; \
|
select-window -t $original_window \; \
|
||||||
kill-window -t $tmp_window \; \
|
kill-window -t $tmp_window \; \
|
||||||
resize-pane -Z
|
resize-pane -Z
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ $# -gt 0 ]]; then
|
||||||
|
trap - EXIT
|
||||||
|
exit 130
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
trap cleanup EXIT SIGINT SIGTERM
|
trap 'cleanup 1' SIGUSR1
|
||||||
|
trap 'cleanup' EXIT
|
||||||
|
|
||||||
envs="env TERM=$TERM "
|
envs="export TERM=$TERM "
|
||||||
[[ -n "$FZF_DEFAULT_OPTS" ]] && envs="$envs FZF_DEFAULT_OPTS=$(printf %q "$FZF_DEFAULT_OPTS")"
|
if [[ $opt =~ "-E" ]]; then
|
||||||
[[ -n "$FZF_DEFAULT_COMMAND" ]] && envs="$envs FZF_DEFAULT_COMMAND=$(printf %q "$FZF_DEFAULT_COMMAND")"
|
if [[ $tmux_version == 3.2 ]]; then
|
||||||
|
FZF_DEFAULT_OPTS="--margin 0,1 $FZF_DEFAULT_OPTS"
|
||||||
mkfifo -m o+w $fifo2
|
elif [[ $tmux_32 == 1 ]]; then
|
||||||
mkfifo -m o+w $fifo3
|
FZF_DEFAULT_OPTS="--border $FZF_DEFAULT_OPTS"
|
||||||
|
opt="-B $opt"
|
||||||
|
else
|
||||||
|
echo "fzf-tmux: tmux 3.2 or above is required for popup mode" >&2
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
envs="$envs FZF_DEFAULT_COMMAND=$(printf %q "$FZF_DEFAULT_COMMAND")"
|
||||||
|
envs="$envs FZF_DEFAULT_OPTS=$(printf %q "$FZF_DEFAULT_OPTS")"
|
||||||
|
envs="$envs FZF_DEFAULT_OPTS_FILE=$(printf %q "$FZF_DEFAULT_OPTS_FILE")"
|
||||||
|
[[ -n $RUNEWIDTH_EASTASIAN ]] && envs="$envs RUNEWIDTH_EASTASIAN=$(printf %q "$RUNEWIDTH_EASTASIAN")"
|
||||||
|
[[ -n $BAT_THEME ]] && envs="$envs BAT_THEME=$(printf %q "$BAT_THEME")"
|
||||||
|
echo "$envs;" > "$argsf"
|
||||||
|
|
||||||
# Build arguments to fzf
|
# Build arguments to fzf
|
||||||
opts=""
|
opts=$(printf "%q " "${args[@]}")
|
||||||
for arg in "${args[@]}"; do
|
|
||||||
arg="${arg//\\/\\\\}"
|
|
||||||
arg="${arg//\"/\\\"}"
|
|
||||||
arg="${arg//\`/\\\`}"
|
|
||||||
arg="${arg//$/\\$}"
|
|
||||||
opts="$opts \"$arg\""
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ -n "$term" ]] || [[ -t 0 ]]; then
|
pppid=$$
|
||||||
cat <<< "\"$fzf\" $opts > $fifo2; echo \$? > $fifo3 $close" > $argsf
|
echo -n "trap 'kill -SIGUSR1 -$pppid' EXIT SIGINT SIGTERM;" >> $argsf
|
||||||
TMUX=$(echo $TMUX | cut -d , -f 1,2) tmux set-window-option synchronize-panes off \;\
|
close="; trap - EXIT SIGINT SIGTERM $close"
|
||||||
set-window-option remain-on-exit off \;\
|
|
||||||
split-window $opt "cd $(printf %q "$PWD");$envs bash $argsf" $swap \
|
export TMUX=$(cut -d , -f 1,2 <<< "$TMUX")
|
||||||
> /dev/null 2>&1
|
mkfifo -m o+w $fifo2
|
||||||
|
if [[ $opt =~ "-E" ]]; then
|
||||||
|
cat $fifo2 &
|
||||||
|
if [[ -n $term ]] || [[ -t 0 ]]; then
|
||||||
|
cat <<< "\"$fzf\" $opts > $fifo2; out=\$? $close; exit \$out" >> $argsf
|
||||||
|
else
|
||||||
|
mkfifo $fifo1
|
||||||
|
cat <<< "\"$fzf\" $opts < $fifo1 > $fifo2; out=\$? $close; exit \$out" >> $argsf
|
||||||
|
cat <&0 > $fifo1 &
|
||||||
|
fi
|
||||||
|
|
||||||
|
tmux popup -d "$PWD" $opt "bash $argsf" > /dev/null 2>&1
|
||||||
|
exit $?
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkfifo -m o+w $fifo3
|
||||||
|
if [[ -n $term ]] || [[ -t 0 ]]; then
|
||||||
|
cat <<< "\"$fzf\" $opts > $fifo2; echo \$? > $fifo3 $close" >> $argsf
|
||||||
else
|
else
|
||||||
mkfifo $fifo1
|
mkfifo $fifo1
|
||||||
cat <<< "\"$fzf\" $opts < $fifo1 > $fifo2; echo \$? > $fifo3 $close" > $argsf
|
cat <<< "\"$fzf\" $opts < $fifo1 > $fifo2; echo \$? > $fifo3 $close" >> $argsf
|
||||||
TMUX=$(echo $TMUX | cut -d , -f 1,2) tmux set-window-option synchronize-panes off \;\
|
|
||||||
set-window-option remain-on-exit off \;\
|
|
||||||
split-window $opt "$envs bash $argsf" $swap \
|
|
||||||
> /dev/null 2>&1
|
|
||||||
cat <&0 > $fifo1 &
|
cat <&0 > $fifo1 &
|
||||||
fi
|
fi
|
||||||
|
tmux \
|
||||||
|
split-window -c "$PWD" $opt "bash -c 'exec -a fzf bash $argsf'" $swap \
|
||||||
|
$tmux_off_opts \
|
||||||
|
> /dev/null 2>&1 || {
|
||||||
|
"$fzf" "${args[@]}"
|
||||||
|
exit $?
|
||||||
|
}
|
||||||
cat $fifo2
|
cat $fifo2
|
||||||
exit "$(cat $fifo3)"
|
exit "$(cat $fifo3)"
|
||||||
|
|
||||||
|
|||||||
doc/fzf.txt (new file, 509 lines)
@@ -0,0 +1,509 @@
|
|||||||
|
fzf.txt fzf Last change: February 15 2024
|
||||||
|
FZF - TABLE OF CONTENTS *fzf* *fzf-toc*
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
FZF Vim integration |fzf-vim-integration|
|
||||||
|
Installation |fzf-installation|
|
||||||
|
Summary |fzf-summary|
|
||||||
|
:FZF[!] |:FZF|
|
||||||
|
Configuration |fzf-configuration|
|
||||||
|
Examples |fzf-examples|
|
||||||
|
Explanation of g:fzf_colors |fzf-explanation-of-gfzfcolors|
|
||||||
|
fzf#run |fzf#run|
|
||||||
|
fzf#wrap |fzf#wrap|
|
||||||
|
Global options supported by fzf#wrap |fzf-global-options-supported-by-fzf#wrap|
|
||||||
|
Tips |fzf-tips|
|
||||||
|
fzf inside terminal buffer |fzf-inside-terminal-buffer|
|
||||||
|
Starting fzf in a popup window |fzf-starting-fzf-in-a-popup-window|
|
||||||
|
Hide statusline |fzf-hide-statusline|
|
||||||
|
License |fzf-license|
|
||||||
|
|
||||||
|
FZF VIM INTEGRATION *fzf-vim-integration*
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
INSTALLATION *fzf-installation*
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
Once you have fzf installed, you can enable it inside Vim simply by adding the
|
||||||
|
directory to 'runtimepath' in your Vim configuration file. The path may differ
|
||||||
|
depending on the package manager.
|
||||||
|
>
|
||||||
|
" If installed using Homebrew
|
||||||
|
set rtp+=/usr/local/opt/fzf
|
||||||
|
|
||||||
|
" If installed using Homebrew on Apple Silicon
|
||||||
|
set rtp+=/opt/homebrew/opt/fzf
|
||||||
|
|
||||||
|
" If you have cloned fzf on ~/.fzf directory
|
||||||
|
set rtp+=~/.fzf
|
||||||
|
<
|
||||||
|
If you use {vim-plug}{1}, the same can be written as:
|
||||||
|
>
|
||||||
|
" If installed using Homebrew
|
||||||
|
Plug '/usr/local/opt/fzf'
|
||||||
|
|
||||||
|
" If installed using Homebrew on Apple Silicon
|
||||||
|
Plug '/opt/homebrew/opt/fzf'
|
||||||
|
|
||||||
|
" If you have cloned fzf on ~/.fzf directory
|
||||||
|
Plug '~/.fzf'
|
||||||
|
<
|
||||||
|
But if you want the latest Vim plugin file from GitHub rather than the one
|
||||||
|
included in the package, write:
|
||||||
|
>
|
||||||
|
Plug 'junegunn/fzf'
|
||||||
|
<
|
||||||
|
The Vim plugin will pick up the fzf binary available on the system. If fzf is not
|
||||||
|
found on `$PATH`, it will ask you if it should download the latest binary for
|
||||||
|
you.
|
||||||
|
|
||||||
|
To make sure that you have the latest version of the binary, set up
|
||||||
|
post-update hook like so:
|
||||||
|
|
||||||
|
*fzf#install*
|
||||||
|
>
|
||||||
|
Plug 'junegunn/fzf', { 'do': { -> fzf#install() } }
|
||||||
|
<
|
||||||
|
{1} https://github.com/junegunn/vim-plug
|
||||||
|
|
||||||
|
|
||||||
|
SUMMARY *fzf-summary*
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
The Vim plugin of fzf provides two core functions and the `:FZF` command, which is
a basic file selector command built on top of them.
|
||||||
|
|
||||||
|
1. `fzf#run([spec dict])`
|
||||||
|
- Starts fzf inside Vim with the given spec
|
||||||
|
- `:call fzf#run({'source': 'ls'})`
|
||||||
|
2. `fzf#wrap([spec dict]) -> (dict)`
|
||||||
|
- Takes a spec for `fzf#run` and returns an extended version of it with
|
||||||
|
additional options for addressing global preferences (`g:fzf_xxx`)
|
||||||
|
- `:echo fzf#wrap({'source': 'ls'})`
|
||||||
|
- We usually wrap a spec with `fzf#wrap` before passing it to `fzf#run`
|
||||||
|
- `:call fzf#run(fzf#wrap({'source': 'ls'}))`
|
||||||
|
3. `:FZF [fzf_options string] [path string]`
|
||||||
|
- Basic fuzzy file selector
|
||||||
|
- A reference implementation for those who don't want to write VimScript to
|
||||||
|
implement custom commands
|
||||||
|
- If you're looking for more such commands, check out {fzf.vim}{2} project.
|
||||||
|
|
||||||
|
The most important of all is `fzf#run`, but it would be easier to understand
the whole picture if we start off with the `:FZF` command.
|
||||||
|
|
||||||
|
{2} https://github.com/junegunn/fzf.vim
|
||||||
|
|
||||||
|
|
||||||
|
:FZF[!]
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
*:FZF*
|
||||||
|
>
|
||||||
|
" Look for files under current directory
|
||||||
|
:FZF
|
||||||
|
|
||||||
|
" Look for files under your home directory
|
||||||
|
:FZF ~
|
||||||
|
|
||||||
|
" With fzf command-line options
|
||||||
|
:FZF --reverse --info=inline /tmp
|
||||||
|
|
||||||
|
" Bang version starts fzf in fullscreen mode
|
||||||
|
:FZF!
|
||||||
|
<
|
||||||
|
Similarly to {ctrlp.vim}{3}, use enter key, CTRL-T, CTRL-X or CTRL-V to open
|
||||||
|
selected files in the current window, in new tabs, in horizontal splits, or in
|
||||||
|
vertical splits respectively.
|
||||||
|
|
||||||
|
Note that the environment variables `FZF_DEFAULT_COMMAND` and
|
||||||
|
`FZF_DEFAULT_OPTS` also apply here.
|
||||||
|
|
||||||
|
{3} https://github.com/kien/ctrlp.vim
|
||||||
|
|
||||||
|
|
||||||
|
< Configuration >_____________________________________________________________~
|
||||||
|
*fzf-configuration*
|
||||||
|
|
||||||
|
*g:fzf_action* *g:fzf_layout* *g:fzf_colors* *g:fzf_history_dir*
|
||||||
|
|
||||||
|
- `g:fzf_action`
|
||||||
|
- Customizable extra key bindings for opening selected files in different
|
||||||
|
ways
|
||||||
|
- `g:fzf_layout`
|
||||||
|
- Determines the size and position of fzf window
|
||||||
|
- `g:fzf_colors`
|
||||||
|
- Customizes fzf colors to match the current color scheme
|
||||||
|
- `g:fzf_history_dir`
|
||||||
|
- Enables history feature
|
||||||
|
|
||||||
|
|
||||||
|
Examples~
|
||||||
|
*fzf-examples*
|
||||||
|
>
|
||||||
|
" This is the default extra key bindings
|
||||||
|
let g:fzf_action = {
|
||||||
|
\ 'ctrl-t': 'tab split',
|
||||||
|
\ 'ctrl-x': 'split',
|
||||||
|
\ 'ctrl-v': 'vsplit' }
|
||||||
|
|
||||||
|
" An action can be a reference to a function that processes selected lines
|
||||||
|
function! s:build_quickfix_list(lines)
|
||||||
|
call setqflist(map(copy(a:lines), '{ "filename": v:val, "lnum": 1 }'))
|
||||||
|
copen
|
||||||
|
cc
|
||||||
|
endfunction
|
||||||
|
|
||||||
|
let g:fzf_action = {
|
||||||
|
\ 'ctrl-q': function('s:build_quickfix_list'),
|
||||||
|
\ 'ctrl-t': 'tab split',
|
||||||
|
\ 'ctrl-x': 'split',
|
||||||
|
\ 'ctrl-v': 'vsplit' }
|
||||||
|
|
||||||
|
" Default fzf layout
|
||||||
|
" - Popup window (center of the screen)
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
|
||||||
|
|
||||||
|
" - Popup window (center of the current window)
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true } }
|
||||||
|
|
||||||
|
" - Popup window (anchored to the bottom of the current window)
|
||||||
|
let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6, 'relative': v:true, 'yoffset': 1.0 } }
|
||||||
|
|
||||||
|
" - down / up / left / right
|
||||||
|
let g:fzf_layout = { 'down': '40%' }
|
||||||
|
|
||||||
|
" - Window using a Vim command
|
||||||
|
let g:fzf_layout = { 'window': 'enew' }
|
||||||
|
let g:fzf_layout = { 'window': '-tabnew' }
|
||||||
|
let g:fzf_layout = { 'window': '10new' }
|
||||||
|
|
||||||
|
" Customize fzf colors to match your color scheme
|
||||||
|
" - fzf#wrap translates this to a set of `--color` options
|
||||||
|
let g:fzf_colors =
|
||||||
|
\ { 'fg': ['fg', 'Normal'],
|
||||||
|
\ 'bg': ['bg', 'Normal'],
|
||||||
|
\ 'hl': ['fg', 'Comment'],
|
||||||
|
\ 'fg+': ['fg', 'CursorLine', 'CursorColumn', 'Normal'],
|
||||||
|
\ 'bg+': ['bg', 'CursorLine', 'CursorColumn'],
|
||||||
|
\ 'hl+': ['fg', 'Statement'],
|
||||||
|
\ 'info': ['fg', 'PreProc'],
|
||||||
|
\ 'border': ['fg', 'Ignore'],
|
||||||
|
\ 'prompt': ['fg', 'Conditional'],
|
||||||
|
\ 'pointer': ['fg', 'Exception'],
|
||||||
|
\ 'marker': ['fg', 'Keyword'],
|
||||||
|
\ 'spinner': ['fg', 'Label'],
|
||||||
|
\ 'header': ['fg', 'Comment'] }
|
||||||
|
|
||||||
|
" Enable per-command history
|
||||||
|
" - History files will be stored in the specified directory
|
||||||
|
" - When set, CTRL-N and CTRL-P will be bound to 'next-history' and
|
||||||
|
" 'previous-history' instead of 'down' and 'up'.
|
||||||
|
let g:fzf_history_dir = '~/.local/share/fzf-history'
|
||||||
|
<
|
||||||
|
|
||||||
|
Explanation of g:fzf_colors~
|
||||||
|
*fzf-explanation-of-gfzfcolors*
|
||||||
|
|
||||||
|
`g:fzf_colors` is a dictionary mapping fzf elements to a color specification
|
||||||
|
list:
|
||||||
|
>
|
||||||
|
element: [ component, group1 [, group2, ...] ]
|
||||||
|
<
|
||||||
|
- `element` is an fzf element to apply a color to:
|
||||||
|
|
||||||
|
----------------------------+------------------------------------------------------
|
||||||
|
Element | Description ~
|
||||||
|
----------------------------+------------------------------------------------------
|
||||||
|
`fg` / `bg` / `hl` | Item (foreground / background / highlight)
|
||||||
|
`fg+` / `bg+` / `hl+` | Current item (foreground / background / highlight)
|
||||||
|
`preview-fg` / `preview-bg` | Preview window text and background
|
||||||
|
`hl` / `hl+` | Highlighted substrings (normal / current)
|
||||||
|
`gutter` | Background of the gutter on the left
|
||||||
|
`pointer` | Pointer to the current line ( `>` )
|
||||||
|
`marker` | Multi-select marker ( `>` )
|
||||||
|
`border` | Border around the window ( `--border` and `--preview` )
|
||||||
|
`header` | Header ( `--header` or `--header-lines` )
|
||||||
|
`info` | Info line (match counters)
|
||||||
|
`spinner` | Streaming input indicator
|
||||||
|
`query` | Query string
|
||||||
|
`disabled` | Query string when search is disabled
|
||||||
|
`prompt` | Prompt before query ( `> ` )
|
||||||
|
|
||||||
|
----------------------------+------------------------------------------------------
|
||||||
|
- `component` specifies the component (`fg` / `bg`) from which to extract the
|
||||||
|
color when considering each of the following highlight groups
|
||||||
|
- `group1 [, group2, ...]` is a list of highlight groups that are searched (in
|
||||||
|
order) for a matching color definition
|
||||||
|
|
||||||
|
For example, consider the following specification:
|
||||||
|
>
|
||||||
|
'prompt': ['fg', 'Conditional', 'Comment'],
|
||||||
|
<
|
||||||
|
This means we color the prompt
- using the `fg` attribute of the `Conditional` if it exists,
- otherwise use the `fg` attribute of the `Comment` highlight group if it exists,
- otherwise fall back to the default color settings for the prompt.
|
||||||
|
|
||||||
|
You can examine the `--color` options generated according to this setting by printing
the result of the `fzf#wrap()` function like so:
|
||||||
|
>
|
||||||
|
:echo fzf#wrap()
|
||||||
|
<
|
||||||
|
|
||||||
|
FZF#RUN
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
*fzf#run*
|
||||||
|
|
||||||
|
The `fzf#run()` function is the core of the Vim integration. It takes a single
|
||||||
|
dictionary argument, a spec, and starts fzf process accordingly. At the very
|
||||||
|
least, specify `sink` option to tell what it should do with the selected
|
||||||
|
entry.
|
||||||
|
>
|
||||||
|
call fzf#run({'sink': 'e'})
|
||||||
|
<
|
||||||
|
We haven't specified the `source`, so this is equivalent to starting fzf on
|
||||||
|
the command line without a standard input pipe; fzf will traverse the file system
|
||||||
|
under the current directory to get the list of files. (If
|
||||||
|
`$FZF_DEFAULT_COMMAND` is set, fzf will use the output of the command
|
||||||
|
instead.) When you select one, it is opened with the sink, the `:e` command. If
you want to open it in a new tab, you can pass the `:tabedit` command instead as
|
||||||
|
the sink.
|
||||||
|
>
|
||||||
|
call fzf#run({'sink': 'tabedit'})
|
||||||
|
<
|
||||||
|
You can use any shell command as the source to generate the list. The
|
||||||
|
following example lists the files managed by git. It's equivalent to
running `git ls-files | fzf` in a shell.
|
||||||
|
>
|
||||||
|
call fzf#run({'source': 'git ls-files', 'sink': 'e'})
|
||||||
|
<
|
||||||
|
fzf options can be specified as the `options` entry in the spec dictionary.
|
||||||
|
>
|
||||||
|
call fzf#run({'sink': 'tabedit', 'options': '--multi --reverse'})
|
||||||
|
<
|
||||||
|
You can also pass a layout option if you don't want the fzf window to take up the
|
||||||
|
entire screen.
|
||||||
|
>
|
||||||
|
" up / down / left / right / window are allowed
|
||||||
|
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'left': '40%'})
|
||||||
|
call fzf#run({'source': 'git ls-files', 'sink': 'e', 'window': '30vnew'})
|
||||||
|
<
|
||||||
|
`source` doesn't have to be an external shell command; you can also pass a Vim
list as the source. In the next example, we pass the names of color schemes
|
||||||
|
as the source to implement a color scheme selector.
|
||||||
|
>
|
||||||
|
call fzf#run({'source': map(split(globpath(&rtp, 'colors/*.vim')),
|
||||||
|
\ 'fnamemodify(v:val, ":t:r")'),
|
||||||
|
\ 'sink': 'colo', 'left': '25%'})
|
||||||
|
<
|
||||||
|
The following table summarizes the available options; a short example of the `exit` callback follows the table.
|
||||||
|
|
||||||
|
---------------------------+---------------+----------------------------------------------------------------------
|
||||||
|
Option name | Type | Description ~
|
||||||
|
---------------------------+---------------+----------------------------------------------------------------------
|
||||||
|
`source` | string | External command to generate input to fzf (e.g. `find .` )
|
||||||
|
`source` | list | Vim list as input to fzf
|
||||||
|
`sink` | string | Vim command to handle the selected item (e.g. `e` , `tabe` )
|
||||||
|
`sink` | funcref | Function to be called with each selected item
|
||||||
|
`sinklist` (or `sink*` ) | funcref | Similar to `sink` , but takes the list of output lines at once
|
||||||
|
`exit` | funcref | Function to be called with the exit status of fzf (e.g. 0, 1, 2, 130)
|
||||||
|
`options` | string/list | Options to fzf
|
||||||
|
`dir` | string | Working directory
|
||||||
|
`up` / `down` / `left` / `right` | number/string | (Layout) Window position and size (e.g. `20` , `50%` )
|
||||||
|
`tmux` | string | (Layout) `--tmux` options (e.g. `90%,70%` )
|
||||||
|
`window` (Vim 8 / Neovim) | string | (Layout) Command to open fzf window (e.g. `vertical aboveleft 30new` )
|
||||||
|
`window` (Vim 8 / Neovim) | dict | (Layout) Popup window settings (e.g. `{'width': 0.9, 'height': 0.6}` )
|
||||||
|
---------------------------+---------------+----------------------------------------------------------------------
|
||||||
|
|
||||||
|
The `options` entry can be either a string or a list. For simple cases, a string
should suffice, but prefer the list type to avoid escaping issues.
|
||||||
|
>
|
||||||
|
call fzf#run({'options': '--reverse --prompt "C:\\Program Files\\"'})
|
||||||
|
call fzf#run({'options': ['--reverse', '--prompt', 'C:\Program Files\']})
|
||||||
|
<
|
||||||
|
When the `window` entry is a dictionary, fzf will start in a popup window. The
|
||||||
|
following options are allowed:
|
||||||
|
|
||||||
|
- Required:
|
||||||
|
- `width` [float range [0 ~ 1]] or [integer range [8 ~ ]]
|
||||||
|
- `height` [float range [0 ~ 1]] or [integer range [4 ~ ]]
|
||||||
|
- Optional:
|
||||||
|
- `yoffset` [float default 0.5 range [0 ~ 1]]
|
||||||
|
- `xoffset` [float default 0.5 range [0 ~ 1]]
|
||||||
|
- `relative` [boolean default v:false]
|
||||||
|
- `border` [string default `rounded` (`sharp` on Windows)]: Border style
|
||||||
|
- `rounded` / `sharp` / `horizontal` / `vertical` / `top` / `bottom` / `left` / `right` / `no[ne]`
|
||||||
|
|
||||||
|
|
||||||
|
FZF#WRAP
|
||||||
|
==============================================================================
|
||||||
|
|
||||||
|
*fzf#wrap*
|
||||||
|
|
||||||
|
We have seen that several aspects of the `:FZF` command can be configured with a
|
||||||
|
set of global option variables; different ways to open files (`g:fzf_action`),
|
||||||
|
window position and size (`g:fzf_layout`), color palette (`g:fzf_colors`),
|
||||||
|
etc.
|
||||||
|
|
||||||
|
So how can we make our custom `fzf#run` calls also respect those variables?
|
||||||
|
Simply by "wrapping" the spec dictionary with `fzf#wrap` before passing it to
|
||||||
|
`fzf#run`.
|
||||||
|
|
||||||
|
- `fzf#wrap([name string], [spec dict], [fullscreen bool]) -> (dict)`
|
||||||
|
- All arguments are optional. Usually we only need to pass a spec
|
||||||
|
dictionary.
|
||||||
|
- `name` is for managing history files. It is ignored if `g:fzf_history_dir`
|
||||||
|
is not defined.
|
||||||
|
- `fullscreen` can be either `0` or `1` (default: 0).
|
||||||
|
|
||||||
|
`fzf#wrap` takes a spec and returns an extended version of it (also a
|
||||||
|
dictionary) with additional options for addressing global preferences. You can
|
||||||
|
examine the return value of it like so:
|
||||||
|
>
|
||||||
|
echo fzf#wrap({'source': 'ls'})
|
||||||
|
<
|
||||||
|
After we "wrap" our spec, we pass it to `fzf#run`.
|
||||||
|
>
|
||||||
|
call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||||
|
<
|
||||||
|
Now it supports CTRL-T, CTRL-V, and CTRL-X key bindings (configurable via
|
||||||
|
`g:fzf_action`) and it opens fzf window according to `g:fzf_layout` setting.
|
||||||
|
|
||||||
|
To make it easier to use, let's define `LS` command.
|
||||||
|
>
|
||||||
|
command! LS call fzf#run(fzf#wrap({'source': 'ls'}))
|
||||||
|
<
|
||||||
|
Type `:LS` and see how it works.
|
||||||
|
|
||||||
|
We would like to make `:LS!` (bang version) open fzf in fullscreen, just like
|
||||||
|
`:FZF!`. Add `-bang` to the command definition, and use the <bang> value to set the
|
||||||
|
last `fullscreen` argument of `fzf#wrap` (see :help <bang>).
|
||||||
|
>
|
||||||
|
" On :LS!, <bang> evaluates to '!', and '!0' becomes 1
|
||||||
|
command! -bang LS call fzf#run(fzf#wrap({'source': 'ls'}, <bang>0))
|
||||||
|
<
|
||||||
|
Our `:LS` command will be much more useful if we can pass a directory argument
|
||||||
|
to it, so that something like `:LS /tmp` is possible.
|
||||||
|
>
|
||||||
|
command! -bang -complete=dir -nargs=? LS
|
||||||
|
\ call fzf#run(fzf#wrap({'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||||
|
<
|
||||||
|
Lastly, if you have enabled `g:fzf_history_dir`, you might want to assign a
|
||||||
|
unique name to our command and pass it as the first argument to `fzf#wrap`.
|
||||||
|
>
|
||||||
|
" The query history for this command will be stored as 'ls' inside g:fzf_history_dir.
|
||||||
|
" The name is ignored if g:fzf_history_dir is not defined.
|
||||||
|
command! -bang -complete=dir -nargs=? LS
|
||||||
|
\ call fzf#run(fzf#wrap('ls', {'source': 'ls', 'dir': <q-args>}, <bang>0))
|
||||||
|
<
|
||||||
|
|
||||||
|
< Global options supported by fzf#wrap >______________________________________~
|
||||||
|
*fzf-global-options-supported-by-fzf#wrap*
|
||||||
|
|
||||||
|
- `g:fzf_layout`
|
||||||
|
- `g:fzf_action`
|
||||||
|
- Works only when no custom `sink` (or `sinklist`) is provided
|
||||||
|
- Having custom sink usually means that each entry is not an ordinary
|
||||||
|
file path (e.g. name of color scheme), so we can't blindly apply the
|
||||||
|
same strategy (i.e. `tabedit some-color-scheme` doesn't make sense)
|
||||||
|
- `g:fzf_colors`
|
||||||
|
- `g:fzf_history_dir`
|
||||||
|
|
||||||
|
|
||||||
|
TIPS                                                                *fzf-tips*
==============================================================================


< fzf inside terminal buffer >________________________________________________~
                                                  *fzf-inside-terminal-buffer*


On the latest versions of Vim and Neovim, fzf will start in a terminal buffer.
If the default ANSI colors in it look different from those of your terminal,
consider configuring them using `g:terminal_ansi_colors` in regular Vim or
`g:terminal_color_x` in Neovim.

>
    " Terminal colors for seoul256 color scheme
    if has('nvim')
      let g:terminal_color_0 = '#4e4e4e'
      let g:terminal_color_1 = '#d68787'
      let g:terminal_color_2 = '#5f865f'
      let g:terminal_color_3 = '#d8af5f'
      let g:terminal_color_4 = '#85add4'
      let g:terminal_color_5 = '#d7afaf'
      let g:terminal_color_6 = '#87afaf'
      let g:terminal_color_7 = '#d0d0d0'
      let g:terminal_color_8 = '#626262'
      let g:terminal_color_9 = '#d75f87'
      let g:terminal_color_10 = '#87af87'
      let g:terminal_color_11 = '#ffd787'
      let g:terminal_color_12 = '#add4fb'
      let g:terminal_color_13 = '#ffafaf'
      let g:terminal_color_14 = '#87d7d7'
      let g:terminal_color_15 = '#e4e4e4'
    else
      let g:terminal_ansi_colors = [
        \ '#4e4e4e', '#d68787', '#5f865f', '#d8af5f',
        \ '#85add4', '#d7afaf', '#87afaf', '#d0d0d0',
        \ '#626262', '#d75f87', '#87af87', '#ffd787',
        \ '#add4fb', '#ffafaf', '#87d7d7', '#e4e4e4'
      \ ]
    endif
<

< Starting fzf in a popup window >____________________________________________~
                                         *fzf-starting-fzf-in-a-popup-window*
>
    " Required:
    " - width [float range [0 ~ 1]] or [integer range [8 ~ ]]
    " - height [float range [0 ~ 1]] or [integer range [4 ~ ]]
    "
    " Optional:
    " - xoffset [float default 0.5 range [0 ~ 1]]
    " - yoffset [float default 0.5 range [0 ~ 1]]
    " - relative [boolean default v:false]
    " - border [string default 'rounded']: Border style
    "   - 'rounded' / 'sharp' / 'horizontal' / 'vertical' / 'top' / 'bottom' / 'left' / 'right'
    let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
<
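For example, one can combine the optional keys listed above (a sketch; adjust
the values to taste):
>
    let g:fzf_layout = { 'window': {
      \ 'width': 0.9, 'height': 0.6, 'yoffset': 0.4, 'border': 'sharp' } }
<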
Alternatively, you can make fzf open in a tmux popup window (requires tmux 3.2
or above) by putting `--tmux` options in the `tmux` key.
>
    " See `--tmux` option in `man fzf` for available options
    " [center|top|bottom|left|right][,SIZE[%]][,SIZE[%]]
    if exists('$TMUX')
      let g:fzf_layout = { 'tmux': '90%,70%' }
    else
      let g:fzf_layout = { 'window': { 'width': 0.9, 'height': 0.6 } }
    endif
<

< Hide statusline >___________________________________________________________~
                                                         *fzf-hide-statusline*

When fzf starts in a terminal buffer, the file type of the buffer is set to
`fzf`. So you can set up a `FileType fzf` autocmd to customize the settings of
the window.

For example, if you open fzf at the bottom of the screen (e.g. `{'down':
'40%'}`), you might want to temporarily disable the statusline for a cleaner
look.

>
    let g:fzf_layout = { 'down': '30%' }
    autocmd! FileType fzf
    autocmd FileType fzf set laststatus=0 noshowmode noruler
      \| autocmd BufLeave <buffer> set laststatus=2 showmode ruler
<

LICENSE                                                          *fzf-license*
==============================================================================

The MIT License (MIT)

Copyright (c) 2013-2025 Junegunn Choi

==============================================================================
vim:tw=78:sw=2:ts=2:ft=help:norl:nowrap:
20  go.mod  Normal file
@@ -0,0 +1,20 @@
module github.com/junegunn/fzf

require (
	github.com/charlievieth/fastwalk v1.0.14
	github.com/gdamore/tcell/v2 v2.9.0
	github.com/junegunn/go-shellwords v0.0.0-20250127100254-2aa3b3277741
	github.com/mattn/go-isatty v0.0.20
	github.com/rivo/uniseg v0.4.7
	golang.org/x/sys v0.35.0
	golang.org/x/term v0.34.0
)

require (
	github.com/gdamore/encoding v1.0.1 // indirect
	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
	github.com/mattn/go-runewidth v0.0.16 // indirect
	golang.org/x/text v0.28.0 // indirect
)

go 1.23.0
55  go.sum  Normal file
@@ -0,0 +1,55 @@
github.com/charlievieth/fastwalk v1.0.14 h1:3Eh5uaFGwHZd8EGwTjJnSpBkfwfsak9h6ICgnWlhAyg=
|
||||||
|
github.com/charlievieth/fastwalk v1.0.14/go.mod h1:diVcUreiU1aQ4/Wu3NbxxH4/KYdKpLDojrQ1Bb2KgNY=
|
||||||
|
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
||||||
|
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
||||||
|
github.com/gdamore/tcell/v2 v2.9.0 h1:N6t+eqK7/xwtRPwxzs1PXeRWnm0H9l02CrgJ7DLn1ys=
|
||||||
|
github.com/gdamore/tcell/v2 v2.9.0/go.mod h1:8/ZoqM9rxzYphT9tH/9LnunhV9oPBqwS8WHGYm5nrmo=
|
||||||
|
github.com/junegunn/go-shellwords v0.0.0-20250127100254-2aa3b3277741 h1:7dYDtfMDfKzjT+DVfIS4iqknSEKtZpEcXtu6vuaasHs=
|
||||||
|
github.com/junegunn/go-shellwords v0.0.0-20250127100254-2aa3b3277741/go.mod h1:6EILKtGpo5t+KLb85LNZLAF6P9LKp78hJI80PXMcn3c=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||||
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||||
|
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||||
|
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
|
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
|
||||||
|
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||||
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
|
golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
|
||||||
|
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
|
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
|
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
|
||||||
|
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
|
||||||
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
403  install
@@ -2,12 +2,14 @@
|
|||||||
|
|
||||||
set -u
|
set -u
|
||||||
|
|
||||||
version=0.16.3
|
version=0.66.0
|
||||||
auto_completion=
|
auto_completion=
|
||||||
key_bindings=
|
key_bindings=
|
||||||
update_config=2
|
update_config=2
|
||||||
binary_arch=
|
shells="bash zsh fish"
|
||||||
allow_legacy=
|
prefix='~/.fzf'
|
||||||
|
prefix_expand=~/.fzf
|
||||||
|
fish_dir=${XDG_CONFIG_HOME:-$HOME/.config}/fish
|
||||||
|
|
||||||
help() {
|
help() {
|
||||||
cat << EOF
|
cat << EOF
|
||||||
@@ -17,12 +19,14 @@ usage: $0 [OPTIONS]
|
|||||||
--bin Download fzf binary only; Do not generate ~/.fzf.{bash,zsh}
|
--bin Download fzf binary only; Do not generate ~/.fzf.{bash,zsh}
|
||||||
--all Download fzf binary and update configuration files
|
--all Download fzf binary and update configuration files
|
||||||
to enable key bindings and fuzzy completion
|
to enable key bindings and fuzzy completion
|
||||||
|
--xdg Generate files under \$XDG_CONFIG_HOME/fzf
|
||||||
--[no-]key-bindings Enable/disable key bindings (CTRL-T, CTRL-R, ALT-C)
|
--[no-]key-bindings Enable/disable key bindings (CTRL-T, CTRL-R, ALT-C)
|
||||||
--[no-]completion Enable/disable fuzzy completion (bash & zsh)
|
--[no-]completion Enable/disable fuzzy completion (bash & zsh)
|
||||||
--[no-]update-rc Whether or not to update shell configuration files
|
--[no-]update-rc Whether or not to update shell configuration files
|
||||||
|
|
||||||
--32 Download 32-bit binary
|
--no-bash Do not set up bash configuration
|
||||||
--64 Download 64-bit binary
|
--no-zsh Do not set up zsh configuration
|
||||||
|
--no-fish Do not set up fish configuration
|
||||||
EOF
|
EOF
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -36,17 +40,22 @@ for opt in "$@"; do
|
|||||||
auto_completion=1
|
auto_completion=1
|
||||||
key_bindings=1
|
key_bindings=1
|
||||||
update_config=1
|
update_config=1
|
||||||
allow_legacy=1
|
|
||||||
;;
|
;;
|
||||||
--key-bindings) key_bindings=1 ;;
|
--xdg)
|
||||||
--no-key-bindings) key_bindings=0 ;;
|
prefix='"${XDG_CONFIG_HOME:-$HOME/.config}"/fzf/fzf'
|
||||||
--completion) auto_completion=1 ;;
|
prefix_expand=${XDG_CONFIG_HOME:-$HOME/.config}/fzf/fzf
|
||||||
--no-completion) auto_completion=0 ;;
|
mkdir -p "${XDG_CONFIG_HOME:-$HOME/.config}/fzf"
|
||||||
--update-rc) update_config=1 ;;
|
;;
|
||||||
--no-update-rc) update_config=0 ;;
|
--key-bindings) key_bindings=1 ;;
|
||||||
--32) binary_arch=386 ;;
|
--no-key-bindings) key_bindings=0 ;;
|
||||||
--64) binary_arch=amd64 ;;
|
--completion) auto_completion=1 ;;
|
||||||
--bin) ;;
|
--no-completion) auto_completion=0 ;;
|
||||||
|
--update-rc) update_config=1 ;;
|
||||||
|
--no-update-rc) update_config=0 ;;
|
||||||
|
--bin) ;;
|
||||||
|
--no-bash) shells=${shells/bash/} ;;
|
||||||
|
--no-zsh) shells=${shells/zsh/} ;;
|
||||||
|
--no-fish) shells=${shells/fish/} ;;
|
||||||
*)
|
*)
|
||||||
echo "unknown option: $opt"
|
echo "unknown option: $opt"
|
||||||
help
|
help
|
||||||
@@ -56,53 +65,47 @@ for opt in "$@"; do
|
|||||||
done
|
done
|
||||||
|
|
||||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
fzf_base="$(pwd)"
|
fzf_base=$(pwd)
|
||||||
|
fzf_base_esc=$(printf %q "$fzf_base")
|
||||||
|
|
||||||
ask() {
|
ask() {
|
||||||
# If stdin is a tty, we are "interactive".
|
while true; do
|
||||||
# non-interactive shell: wait for a linefeed
|
read -p "$1 ([y]/n) " -r
|
||||||
# interactive shell: continue after a single keypress
|
REPLY=${REPLY:-"y"}
|
||||||
read_n=$([ -t 0 ] && echo "-n 1")
|
if [[ $REPLY =~ ^[Yy]$ ]]; then
|
||||||
|
return 1
|
||||||
read -p "$1 ([y]/n) " $read_n -r
|
elif [[ $REPLY =~ ^[Nn]$ ]]; then
|
||||||
echo
|
return 0
|
||||||
[[ $REPLY =~ ^[Nn]$ ]]
|
fi
|
||||||
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
check_binary() {
|
check_binary() {
|
||||||
echo -n " - Checking fzf executable ... "
|
echo -n " - Checking fzf executable ... "
|
||||||
local output
|
local output
|
||||||
output=$("$fzf_base"/bin/fzf --version 2>&1)
|
output=$(FZF_DEFAULT_OPTS= "$fzf_base"/bin/fzf --version 2>&1)
|
||||||
if [ $? -ne 0 ]; then
|
if [ $? -ne 0 ]; then
|
||||||
echo "Error: $output"
|
echo "Error: $output"
|
||||||
binary_error="Invalid binary"
|
binary_error="Invalid binary"
|
||||||
elif [ "$version" != "$output" ]; then
|
|
||||||
echo "$output != $version"
|
|
||||||
binary_error="Invalid version"
|
|
||||||
else
|
else
|
||||||
echo "$output"
|
output=${output/ */}
|
||||||
binary_error=""
|
if [ "$version" != "$output" ]; then
|
||||||
return 0
|
echo "$output != $version"
|
||||||
|
binary_error="Invalid version"
|
||||||
|
else
|
||||||
|
echo "$output"
|
||||||
|
binary_error=""
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
rm -f "$fzf_base"/bin/fzf
|
rm -f "$fzf_base"/bin/fzf
|
||||||
return 1
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
symlink() {
|
|
||||||
echo " - Creating symlink: bin/$1 -> bin/fzf"
|
|
||||||
(cd "$fzf_base"/bin &&
|
|
||||||
rm -f fzf &&
|
|
||||||
ln -sf $1 fzf)
|
|
||||||
if [ $? -ne 0 ]; then
|
|
||||||
binary_error="Failed to create symlink"
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
link_fzf_in_path() {
|
link_fzf_in_path() {
|
||||||
if which_fzf="$(command -v fzf)"; then
|
if which_fzf="$(command -v fzf)"; then
|
||||||
echo " - Found in \$PATH"
|
echo ' - Found in $PATH'
|
||||||
echo " - Creating symlink: $which_fzf -> bin/fzf"
|
echo " - Creating symlink: bin/fzf -> $which_fzf"
|
||||||
(cd "$fzf_base"/bin && rm -f fzf && ln -sf "$which_fzf" fzf)
|
(cd "$fzf_base"/bin && rm -f fzf && ln -sf "$which_fzf" fzf)
|
||||||
check_binary && return
|
check_binary && return
|
||||||
fi
|
fi
|
||||||
@@ -110,25 +113,32 @@ link_fzf_in_path() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try_curl() {
|
try_curl() {
|
||||||
command -v curl > /dev/null && curl -fL $1 | tar -xz
|
command -v curl > /dev/null &&
|
||||||
|
if [[ $1 =~ tar.gz$ ]]; then
|
||||||
|
curl -fL $1 | tar --no-same-owner -xzf -
|
||||||
|
else
|
||||||
|
local temp=${TMPDIR:-/tmp}/fzf.zip
|
||||||
|
curl -fLo "$temp" $1 && unzip -o "$temp" && rm -f "$temp"
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
try_wget() {
|
try_wget() {
|
||||||
command -v wget > /dev/null && wget -O - $1 | tar -xz
|
command -v wget > /dev/null &&
|
||||||
|
if [[ $1 =~ tar.gz$ ]]; then
|
||||||
|
wget -O - $1 | tar --no-same-owner -xzf -
|
||||||
|
else
|
||||||
|
local temp=${TMPDIR:-/tmp}/fzf.zip
|
||||||
|
wget -O "$temp" $1 && unzip -o "$temp" && rm -f "$temp"
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
download() {
|
download() {
|
||||||
echo "Downloading bin/fzf ..."
|
echo "Downloading bin/fzf ..."
|
||||||
if [[ ! "$version" =~ alpha ]]; then
|
if [ -x "$fzf_base"/bin/fzf ]; then
|
||||||
if [ -x "$fzf_base"/bin/fzf ]; then
|
echo " - Already exists"
|
||||||
echo " - Already exists"
|
check_binary && return
|
||||||
check_binary && return
|
|
||||||
fi
|
|
||||||
if [ -x "$fzf_base"/bin/$1 ]; then
|
|
||||||
symlink $1 && check_binary && return
|
|
||||||
fi
|
|
||||||
link_fzf_in_path && return
|
|
||||||
fi
|
fi
|
||||||
|
link_fzf_in_path && return
|
||||||
mkdir -p "$fzf_base"/bin && cd "$fzf_base"/bin
|
mkdir -p "$fzf_base"/bin && cd "$fzf_base"/bin
|
||||||
if [ $? -ne 0 ]; then
|
if [ $? -ne 0 ]; then
|
||||||
binary_error="Failed to create bin directory"
|
binary_error="Failed to create bin directory"
|
||||||
@@ -136,9 +146,7 @@ download() {
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
local url
|
local url
|
||||||
[[ "$version" =~ alpha ]] &&
|
url=https://github.com/junegunn/fzf/releases/download/v$version/${1}
|
||||||
url=https://github.com/junegunn/fzf-bin/releases/download/alpha/${1}.tgz ||
|
|
||||||
url=https://github.com/junegunn/fzf-bin/releases/download/$version/${1}.tgz
|
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
if ! (try_curl $url || try_wget $url); then
|
if ! (try_curl $url || try_wget $url); then
|
||||||
set +o pipefail
|
set +o pipefail
|
||||||
@@ -147,105 +155,40 @@ download() {
|
|||||||
fi
|
fi
|
||||||
set +o pipefail
|
set +o pipefail
|
||||||
|
|
||||||
if [ ! -f $1 ]; then
|
if [ ! -f fzf ]; then
|
||||||
binary_error="Failed to download ${1}"
|
binary_error="Failed to download ${1}"
|
||||||
return
|
return
|
||||||
fi
|
fi
|
||||||
|
|
||||||
chmod +x $1 && symlink $1 && check_binary
|
chmod +x fzf && check_binary
|
||||||
}
|
}
|
||||||
|
|
||||||
# Try to download binary executable
|
# Try to download binary executable
|
||||||
archi=$(uname -sm)
|
archi=$(uname -smo 2> /dev/null || uname -sm)
|
||||||
binary_available=1
|
binary_available=1
|
||||||
binary_error=""
|
binary_error=""
|
||||||
case "$archi" in
|
case "$archi" in
|
||||||
Darwin\ *64) download fzf-$version-darwin_${binary_arch:-amd64} ;;
|
Darwin\ arm64*) download fzf-$version-darwin_arm64.tar.gz ;;
|
||||||
Darwin\ *86) download fzf-$version-darwin_${binary_arch:-386} ;;
|
Darwin\ x86_64*) download fzf-$version-darwin_amd64.tar.gz ;;
|
||||||
Linux\ *64) download fzf-$version-linux_${binary_arch:-amd64} ;;
|
Linux\ armv5*) download fzf-$version-linux_armv5.tar.gz ;;
|
||||||
Linux\ *86) download fzf-$version-linux_${binary_arch:-386} ;;
|
Linux\ armv6*) download fzf-$version-linux_armv6.tar.gz ;;
|
||||||
Linux\ armv5*) download fzf-$version-linux_${binary_arch:-arm5} ;;
|
Linux\ armv7*) download fzf-$version-linux_armv7.tar.gz ;;
|
||||||
Linux\ armv6*) download fzf-$version-linux_${binary_arch:-arm6} ;;
|
Linux\ armv8*) download fzf-$version-linux_arm64.tar.gz ;;
|
||||||
Linux\ armv7*) download fzf-$version-linux_${binary_arch:-arm7} ;;
|
Linux\ aarch64\ Android) download fzf-$version-android_arm64.tar.gz ;;
|
||||||
Linux\ armv8*) download fzf-$version-linux_${binary_arch:-arm8} ;;
|
Linux\ aarch64*) download fzf-$version-linux_arm64.tar.gz ;;
|
||||||
FreeBSD\ *64) download fzf-$version-freebsd_${binary_arch:-amd64} ;;
|
Linux\ loongarch64*) download fzf-$version-linux_loong64.tar.gz ;;
|
||||||
FreeBSD\ *86) download fzf-$version-freebsd_${binary_arch:-386} ;;
|
Linux\ ppc64le*) download fzf-$version-linux_ppc64le.tar.gz ;;
|
||||||
OpenBSD\ *64) download fzf-$version-openbsd_${binary_arch:-amd64} ;;
|
Linux\ *64*) download fzf-$version-linux_amd64.tar.gz ;;
|
||||||
OpenBSD\ *86) download fzf-$version-openbsd_${binary_arch:-386} ;;
|
Linux\ s390x*) download fzf-$version-linux_s390x.tar.gz ;;
|
||||||
*) binary_available=0 binary_error=1 ;;
|
FreeBSD\ *64*) download fzf-$version-freebsd_amd64.tar.gz ;;
|
||||||
|
OpenBSD\ *64*) download fzf-$version-openbsd_amd64.tar.gz ;;
|
||||||
|
CYGWIN*\ *64*) download fzf-$version-windows_amd64.zip ;;
|
||||||
|
MINGW*\ *64*) download fzf-$version-windows_amd64.zip ;;
|
||||||
|
MSYS*\ *64*) download fzf-$version-windows_amd64.zip ;;
|
||||||
|
Windows*\ *64*) download fzf-$version-windows_amd64.zip ;;
|
||||||
|
*) binary_available=0 binary_error=1 ;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
install_ruby_fzf() {
|
|
||||||
if [ -z "$allow_legacy" ]; then
|
|
||||||
ask "Do you want to install legacy Ruby version instead?" && exit 1
|
|
||||||
fi
|
|
||||||
echo "Installing legacy Ruby version ..."
|
|
||||||
|
|
||||||
# ruby executable
|
|
||||||
echo -n "Checking Ruby executable ... "
|
|
||||||
ruby=$(command -v ruby)
|
|
||||||
if [ $? -ne 0 ]; then
|
|
||||||
echo "ruby executable not found !!!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# System ruby is preferred
|
|
||||||
system_ruby=/usr/bin/ruby
|
|
||||||
if [ -x $system_ruby ] && [ $system_ruby != "$ruby" ]; then
|
|
||||||
$system_ruby --disable-gems -rcurses -e0 2> /dev/null
|
|
||||||
[ $? -eq 0 ] && ruby=$system_ruby
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "OK ($ruby)"
|
|
||||||
|
|
||||||
# Curses-support
|
|
||||||
echo -n "Checking Curses support ... "
|
|
||||||
"$ruby" -rcurses -e0 2> /dev/null
|
|
||||||
if [ $? -eq 0 ]; then
|
|
||||||
echo "OK"
|
|
||||||
else
|
|
||||||
echo "Not found"
|
|
||||||
echo "Installing 'curses' gem ... "
|
|
||||||
if (( EUID )); then
|
|
||||||
/usr/bin/env gem install curses --user-install
|
|
||||||
else
|
|
||||||
/usr/bin/env gem install curses
|
|
||||||
fi
|
|
||||||
if [ $? -ne 0 ]; then
|
|
||||||
echo
|
|
||||||
echo "Failed to install 'curses' gem."
|
|
||||||
if [[ $(uname -r) =~ 'ARCH' ]]; then
|
|
||||||
echo "Make sure that base-devel package group is installed."
|
|
||||||
fi
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Ruby version
|
|
||||||
echo -n "Checking Ruby version ... "
|
|
||||||
"$ruby" -e 'exit RUBY_VERSION >= "1.9"'
|
|
||||||
if [ $? -eq 0 ]; then
|
|
||||||
echo ">= 1.9"
|
|
||||||
"$ruby" --disable-gems -rcurses -e0 2> /dev/null
|
|
||||||
if [ $? -eq 0 ]; then
|
|
||||||
fzf_cmd="$ruby --disable-gems $fzf_base/fzf"
|
|
||||||
else
|
|
||||||
fzf_cmd="$ruby $fzf_base/fzf"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "< 1.9"
|
|
||||||
fzf_cmd="$ruby $fzf_base/fzf"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create fzf script
|
|
||||||
echo -n "Creating wrapper script for fzf ... "
|
|
||||||
rm -f "$fzf_base"/bin/fzf
|
|
||||||
echo "#!/bin/sh" > "$fzf_base"/bin/fzf
|
|
||||||
echo "$fzf_cmd \"\$@\"" >> "$fzf_base"/bin/fzf
|
|
||||||
chmod +x "$fzf_base"/bin/fzf
|
|
||||||
echo "OK"
|
|
||||||
}
|
|
||||||
|
|
||||||
cd "$fzf_base"
|
cd "$fzf_base"
|
||||||
if [ -n "$binary_error" ]; then
|
if [ -n "$binary_error" ]; then
|
||||||
if [ $binary_available -eq 0 ]; then
|
if [ $binary_available -eq 0 ]; then
|
||||||
@@ -254,25 +197,36 @@ if [ -n "$binary_error" ]; then
|
|||||||
echo " - $binary_error !!!"
|
echo " - $binary_error !!!"
|
||||||
fi
|
fi
|
||||||
if command -v go > /dev/null; then
|
if command -v go > /dev/null; then
|
||||||
echo -n "Building binary (go get -u github.com/junegunn/fzf/src/fzf) ... "
|
echo -n "Building binary (go install github.com/junegunn/fzf) ... "
|
||||||
if [ -z "${GOPATH-}" ]; then
|
if [ -z "${GOPATH-}" ]; then
|
||||||
export GOPATH="${TMPDIR:-/tmp}/fzf-gopath"
|
export GOPATH="${TMPDIR:-/tmp}/fzf-gopath"
|
||||||
mkdir -p "$GOPATH"
|
mkdir -p "$GOPATH"
|
||||||
fi
|
fi
|
||||||
if go get -u github.com/junegunn/fzf/src/fzf; then
|
if go install -ldflags "-s -w -X main.version=$version -X main.revision=go-install" github.com/junegunn/fzf; then
|
||||||
echo "OK"
|
echo "OK"
|
||||||
cp "$GOPATH/bin/fzf" "$fzf_base/bin/"
|
cp "$GOPATH/bin/fzf" "$fzf_base/bin/"
|
||||||
else
|
else
|
||||||
echo "Failed to build binary ..."
|
echo "Failed to build binary. Installation failed."
|
||||||
install_ruby_fzf
|
exit 1
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
echo "go executable not found. Cannot build binary ..."
|
echo "go executable not found. Installation failed."
|
||||||
install_ruby_fzf
|
exit 1
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
[[ "$*" =~ "--bin" ]] && exit 0
|
[[ $* =~ "--bin" ]] && exit 0
|
||||||
|
|
||||||
|
for s in $shells; do
|
||||||
|
if ! command -v "$s" > /dev/null; then
|
||||||
|
shells=${shells/$s/}
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ ${#shells} -lt 3 ]]; then
|
||||||
|
echo "No shell configuration to be updated."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
# Auto-completion
|
# Auto-completion
|
||||||
if [ -z "$auto_completion" ]; then
|
if [ -z "$auto_completion" ]; then
|
||||||
@@ -287,13 +241,12 @@ if [ -z "$key_bindings" ]; then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
echo
|
echo
|
||||||
has_zsh=$(command -v zsh > /dev/null && echo 1 || echo 0)
|
|
||||||
shells=$([ $has_zsh -eq 1 ] && echo "bash zsh" || echo "bash")
|
|
||||||
for shell in $shells; do
|
for shell in $shells; do
|
||||||
echo -n "Generate ~/.fzf.$shell ... "
|
[[ $shell == fish ]] && continue
|
||||||
src=~/.fzf.${shell}
|
src=${prefix_expand}.${shell}
|
||||||
|
echo -n "Generate $src ... "
|
||||||
|
|
||||||
fzf_completion="[[ \$- == *i* ]] && source \"$fzf_base/shell/completion.${shell}\" 2> /dev/null"
|
fzf_completion="source \"$fzf_base/shell/completion.${shell}\""
|
||||||
if [ $auto_completion -eq 0 ]; then
|
if [ $auto_completion -eq 0 ]; then
|
||||||
fzf_completion="# $fzf_completion"
|
fzf_completion="# $fzf_completion"
|
||||||
fi
|
fi
|
||||||
@@ -303,13 +256,23 @@ for shell in $shells; do
|
|||||||
fzf_key_bindings="# $fzf_key_bindings"
|
fzf_key_bindings="# $fzf_key_bindings"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
cat > $src << EOF
|
cat > "$src" << EOF
|
||||||
# Setup fzf
|
# Setup fzf
|
||||||
# ---------
|
# ---------
|
||||||
if [[ ! "\$PATH" == *$fzf_base/bin* ]]; then
|
if [[ ! "\$PATH" == *$fzf_base_esc/bin* ]]; then
|
||||||
export PATH="\$PATH:$fzf_base/bin"
|
PATH="\${PATH:+\${PATH}:}$fzf_base/bin"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
EOF
|
||||||
|
|
||||||
|
if [[ $auto_completion -eq 1 ]] && [[ $key_bindings -eq 1 ]]; then
|
||||||
|
if [[ $shell == zsh ]]; then
|
||||||
|
echo "source <(fzf --$shell)" >> "$src"
|
||||||
|
else
|
||||||
|
echo "eval \"\$(fzf --$shell)\"" >> "$src"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
cat >> "$src" << EOF
|
||||||
# Auto-completion
|
# Auto-completion
|
||||||
# ---------------
|
# ---------------
|
||||||
$fzf_completion
|
$fzf_completion
|
||||||
@@ -317,71 +280,75 @@ $fzf_completion
|
|||||||
# Key bindings
|
# Key bindings
|
||||||
# ------------
|
# ------------
|
||||||
$fzf_key_bindings
|
$fzf_key_bindings
|
||||||
|
|
||||||
EOF
|
EOF
|
||||||
|
fi
|
||||||
echo "OK"
|
echo "OK"
|
||||||
done
|
done
|
||||||
|
|
||||||
# fish
|
# fish
|
||||||
has_fish=$(command -v fish > /dev/null && echo 1 || echo 0)
|
if [[ $shells =~ fish ]]; then
|
||||||
if [ $has_fish -eq 1 ]; then
|
|
||||||
echo -n "Update fish_user_paths ... "
|
echo -n "Update fish_user_paths ... "
|
||||||
fish << EOF
|
fish << EOF
|
||||||
echo \$fish_user_paths | grep $fzf_base/bin > /dev/null
|
echo \$fish_user_paths | \grep "$fzf_base"/bin > /dev/null
|
||||||
or set --universal fish_user_paths \$fish_user_paths $fzf_base/bin
|
or set --universal fish_user_paths \$fish_user_paths "$fzf_base"/bin
|
||||||
EOF
|
EOF
|
||||||
[ $? -eq 0 ] && echo "OK" || echo "Failed"
|
[ $? -eq 0 ] && echo "OK" || echo "Failed"
|
||||||
|
|
||||||
mkdir -p ~/.config/fish/functions
|
|
||||||
if [ -e ~/.config/fish/functions/fzf.fish ]; then
|
|
||||||
echo -n "Remove unnecessary ~/.config/fish/functions/fzf.fish ... "
|
|
||||||
rm -f ~/.config/fish/functions/fzf.fish && echo "OK" || echo "Failed"
|
|
||||||
fi
|
|
||||||
|
|
||||||
fish_binding=~/.config/fish/functions/fzf_key_bindings.fish
|
|
||||||
if [ $key_bindings -ne 0 ]; then
|
|
||||||
echo -n "Symlink $fish_binding ... "
|
|
||||||
ln -sf "$fzf_base/shell/key-bindings.fish" \
|
|
||||||
"$fish_binding" && echo "OK" || echo "Failed"
|
|
||||||
else
|
|
||||||
echo -n "Removing $fish_binding ... "
|
|
||||||
rm -f "$fish_binding"
|
|
||||||
echo "OK"
|
|
||||||
fi
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
append_line() {
|
append_line() {
|
||||||
set -e
|
local update line file pat lines
|
||||||
|
|
||||||
local update line file pat lno
|
|
||||||
update="$1"
|
update="$1"
|
||||||
line="$2"
|
line="$2"
|
||||||
file="$3"
|
file="$3"
|
||||||
pat="${4:-}"
|
pat="${4:-}"
|
||||||
|
lines=""
|
||||||
|
|
||||||
echo "Update $file:"
|
echo "Update $file:"
|
||||||
echo " - $line"
|
echo " - $line"
|
||||||
[ -f "$file" ] || touch "$file"
|
if [ -f "$file" ]; then
|
||||||
if [ $# -lt 4 ]; then
|
if [ $# -lt 4 ]; then
|
||||||
lno=$(\grep -nF "$line" "$file" | sed 's/:.*//' | tr '\n' ' ')
|
lines=$(\grep -nF "$line" "$file")
|
||||||
else
|
|
||||||
lno=$(\grep -nF "$pat" "$file" | sed 's/:.*//' | tr '\n' ' ')
|
|
||||||
fi
|
|
||||||
if [ -n "$lno" ]; then
|
|
||||||
echo " - Already exists: line #$lno"
|
|
||||||
else
|
|
||||||
if [ $update -eq 1 ]; then
|
|
||||||
echo >> "$file"
|
|
||||||
echo "$line" >> "$file"
|
|
||||||
echo " + Added"
|
|
||||||
else
|
else
|
||||||
echo " ~ Skipped"
|
lines=$(\grep -nF "$pat" "$file")
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [ -n "$lines" ]; then
|
||||||
|
echo " - Already exists:"
|
||||||
|
sed 's/^/ Line /' <<< "$lines"
|
||||||
|
|
||||||
|
update=0
|
||||||
|
if ! \grep -qv "^[0-9]*:[[:space:]]*#" <<< "$lines"; then
|
||||||
|
echo " - But they all seem to be commented"
|
||||||
|
ask " - Continue modifying $file?"
|
||||||
|
update=$?
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
set -e
|
||||||
|
if [ "$update" -eq 1 ]; then
|
||||||
|
[ -f "$file" ] && echo >> "$file"
|
||||||
|
echo "$line" >> "$file"
|
||||||
|
echo " + Added"
|
||||||
|
else
|
||||||
|
echo " ~ Skipped"
|
||||||
|
fi
|
||||||
|
|
||||||
echo
|
echo
|
||||||
set +e
|
set +e
|
||||||
}
|
}
|
||||||
|
|
||||||
|
create_file() {
|
||||||
|
local file="$1"
|
||||||
|
shift
|
||||||
|
echo "Create $file:"
|
||||||
|
for line in "$@"; do
|
||||||
|
echo " $line"
|
||||||
|
echo "$line" >> "$file"
|
||||||
|
done
|
||||||
|
echo
|
||||||
|
}
|
||||||
|
|
||||||
if [ $update_config -eq 2 ]; then
|
if [ $update_config -eq 2 ]; then
|
||||||
echo
|
echo
|
||||||
ask "Do you want to update your shell configuration files?"
|
ask "Do you want to update your shell configuration files?"
|
||||||
@@ -389,23 +356,45 @@ if [ $update_config -eq 2 ]; then
|
|||||||
fi
|
fi
|
||||||
echo
|
echo
|
||||||
for shell in $shells; do
|
for shell in $shells; do
|
||||||
|
[[ $shell == fish ]] && continue
|
||||||
[ $shell = zsh ] && dest=${ZDOTDIR:-~}/.zshrc || dest=~/.bashrc
|
[ $shell = zsh ] && dest=${ZDOTDIR:-~}/.zshrc || dest=~/.bashrc
|
||||||
append_line $update_config "[ -f ~/.fzf.${shell} ] && source ~/.fzf.${shell}" "$dest" "~/.fzf.${shell}"
|
append_line $update_config "[ -f ${prefix}.${shell} ] && source ${prefix}.${shell}" "$dest" "${prefix}.${shell}"
|
||||||
done
|
done
|
||||||
|
|
||||||
if [ $key_bindings -eq 1 ] && [ $has_fish -eq 1 ]; then
|
if [ $key_bindings -eq 1 ] && [[ $shells =~ fish ]]; then
|
||||||
bind_file=~/.config/fish/functions/fish_user_key_bindings.fish
|
bind_file="${fish_dir}/functions/fish_user_key_bindings.fish"
|
||||||
append_line $update_config "fzf_key_bindings" "$bind_file"
|
if [ ! -e "$bind_file" ]; then
|
||||||
|
mkdir -p "${fish_dir}/functions"
|
||||||
|
create_file "$bind_file" \
|
||||||
|
'function fish_user_key_bindings' \
|
||||||
|
' fzf --fish | source' \
|
||||||
|
'end'
|
||||||
|
else
|
||||||
|
echo "Check $bind_file:"
|
||||||
|
lno=$(\grep -nF "fzf_key_bindings" "$bind_file" | sed 's/:.*//' | tr '\n' ' ')
|
||||||
|
if [[ -n $lno ]]; then
|
||||||
|
echo " ** Found 'fzf_key_bindings' in line #$lno"
|
||||||
|
echo " ** You have to replace the line to 'fzf --fish | source'"
|
||||||
|
echo
|
||||||
|
else
|
||||||
|
echo " - Clear"
|
||||||
|
echo
|
||||||
|
append_line $update_config "fzf --fish | source" "$bind_file"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ $update_config -eq 1 ]; then
|
if [ $update_config -eq 1 ]; then
|
||||||
echo 'Finished. Restart your shell or reload config file.'
|
echo 'Finished. Restart your shell or reload config file.'
|
||||||
echo ' source ~/.bashrc # bash'
|
if [[ $shells =~ bash ]]; then
|
||||||
[ $has_zsh -eq 1 ] && echo " source ${ZDOTDIR:-~}/.zshrc # zsh"
|
echo -n ' source ~/.bashrc # bash'
|
||||||
[ $has_fish -eq 1 ] && [ $key_bindings -eq 1 ] && echo ' fzf_key_bindings # fish'
|
[[ $archi =~ Darwin ]] && echo -n ' (.bashrc should be loaded from .bash_profile)'
|
||||||
|
echo
|
||||||
|
fi
|
||||||
|
[[ $shells =~ zsh ]] && echo " source ${ZDOTDIR:-~}/.zshrc # zsh"
|
||||||
|
[[ $shells =~ fish ]] && [ $key_bindings -eq 1 ] && echo ' fzf_key_bindings # fish'
|
||||||
echo
|
echo
|
||||||
echo 'Use uninstall script to remove fzf.'
|
echo 'Use uninstall script to remove fzf.'
|
||||||
echo
|
echo
|
||||||
fi
|
fi
|
||||||
echo 'For more information, see: https://github.com/junegunn/fzf'
|
echo 'For more information, see: https://github.com/junegunn/fzf'
|
||||||
|
|
||||||
|
|||||||
65  install.ps1  Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
$version="0.66.0"
|
||||||
|
|
||||||
|
$fzf_base=Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||||
|
|
||||||
|
function check_binary () {
|
||||||
|
Write-Host " - Checking fzf executable ... " -NoNewline
|
||||||
|
$output=cmd /c $fzf_base\bin\fzf.exe --version 2>&1
|
||||||
|
if (-not $?) {
|
||||||
|
Write-Host "Error: $output"
|
||||||
|
$binary_error="Invalid binary"
|
||||||
|
} else {
|
||||||
|
$output=(-Split $output)[0]
|
||||||
|
if ($version -ne $output) {
|
||||||
|
Write-Host "$output != $version"
|
||||||
|
$binary_error="Invalid version"
|
||||||
|
} else {
|
||||||
|
Write-Host "$output"
|
||||||
|
$binary_error=""
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Remove-Item "$fzf_base\bin\fzf.exe"
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
function download {
|
||||||
|
param($file)
|
||||||
|
Write-Host "Downloading bin/fzf ..."
|
||||||
|
if (Test-Path "$fzf_base\bin\fzf.exe") {
|
||||||
|
Write-Host " - Already exists"
|
||||||
|
if (check_binary) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (-not (Test-Path "$fzf_base\bin")) {
|
||||||
|
md "$fzf_base\bin"
|
||||||
|
}
|
||||||
|
if (-not $?) {
|
||||||
|
$binary_error="Failed to create bin directory"
|
||||||
|
return
|
||||||
|
}
|
||||||
|
cd "$fzf_base\bin"
|
||||||
|
$url="https://github.com/junegunn/fzf/releases/download/v$version/$file"
|
||||||
|
$temp=$env:TMP + "\fzf.zip"
|
||||||
|
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||||
|
if ($PSVersionTable.PSVersion.Major -ge 3) {
|
||||||
|
Invoke-WebRequest -Uri $url -OutFile $temp
|
||||||
|
} else {
|
||||||
|
(New-Object Net.WebClient).DownloadFile($url, $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath("$temp"))
|
||||||
|
}
|
||||||
|
if ($?) {
|
||||||
|
(Microsoft.PowerShell.Archive\Expand-Archive -Path $temp -DestinationPath .); (Remove-Item $temp)
|
||||||
|
} else {
|
||||||
|
$binary_error="Failed to download with powershell"
|
||||||
|
}
|
||||||
|
if (-not (Test-Path fzf.exe)) {
|
||||||
|
$binary_error="Failed to download $file"
|
||||||
|
return
|
||||||
|
}
|
||||||
|
echo y | icacls $fzf_base\bin\fzf.exe /grant Administrator:F ; check_binary >$null
|
||||||
|
}
|
||||||
|
|
||||||
|
download "fzf-$version-windows_amd64.zip"
|
||||||
|
|
||||||
|
Write-Host 'For more information, see: https://github.com/junegunn/fzf'
|
||||||
101  main.go  Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
_ "embed"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
fzf "github.com/junegunn/fzf/src"
|
||||||
|
"github.com/junegunn/fzf/src/protector"
|
||||||
|
)
|
||||||
|
|
||||||
|
var version = "0.66"
|
||||||
|
var revision = "devel"
|
||||||
|
|
||||||
|
//go:embed shell/key-bindings.bash
|
||||||
|
var bashKeyBindings []byte
|
||||||
|
|
||||||
|
//go:embed shell/completion.bash
|
||||||
|
var bashCompletion []byte
|
||||||
|
|
||||||
|
//go:embed shell/key-bindings.zsh
|
||||||
|
var zshKeyBindings []byte
|
||||||
|
|
||||||
|
//go:embed shell/completion.zsh
|
||||||
|
var zshCompletion []byte
|
||||||
|
|
||||||
|
//go:embed shell/key-bindings.fish
|
||||||
|
var fishKeyBindings []byte
|
||||||
|
|
||||||
|
//go:embed man/man1/fzf.1
|
||||||
|
var manPage []byte
|
||||||
|
|
||||||
|
func printScript(label string, content []byte) {
|
||||||
|
fmt.Println("### " + label + " ###")
|
||||||
|
fmt.Println(strings.TrimSpace(string(content)))
|
||||||
|
fmt.Println("### end: " + label + " ###")
|
||||||
|
}
|
||||||
|
|
||||||
|
func exit(code int, err error) {
|
||||||
|
if code == fzf.ExitError && err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err.Error())
|
||||||
|
}
|
||||||
|
os.Exit(code)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
protector.Protect()
|
||||||
|
|
||||||
|
options, err := fzf.ParseOptions(true, os.Args[1:])
|
||||||
|
if err != nil {
|
||||||
|
exit(fzf.ExitError, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if options.Bash {
|
||||||
|
printScript("key-bindings.bash", bashKeyBindings)
|
||||||
|
printScript("completion.bash", bashCompletion)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if options.Zsh {
|
||||||
|
printScript("key-bindings.zsh", zshKeyBindings)
|
||||||
|
printScript("completion.zsh", zshCompletion)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if options.Fish {
|
||||||
|
printScript("key-bindings.fish", fishKeyBindings)
|
||||||
|
fmt.Println("fzf_key_bindings")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if options.Help {
|
||||||
|
fmt.Print(fzf.Usage)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if options.Version {
|
||||||
|
if len(revision) > 0 {
|
||||||
|
fmt.Printf("%s (%s)\n", version, revision)
|
||||||
|
} else {
|
||||||
|
fmt.Println(version)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if options.Man {
|
||||||
|
file := fzf.WriteTemporaryFile([]string{string(manPage)}, "\n")
|
||||||
|
if len(file) == 0 {
|
||||||
|
fmt.Print(string(manPage))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer os.Remove(file)
|
||||||
|
cmd := exec.Command("man", file)
|
||||||
|
cmd.Stdin = os.Stdin
|
||||||
|
cmd.Stdout = os.Stdout
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
fmt.Print(string(manPage))
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
code, err := fzf.Run(options)
|
||||||
|
exit(code, err)
|
||||||
|
}
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
.ig
|
.ig
|
||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|
||||||
Copyright (c) 2017 Junegunn Choi
|
Copyright (c) 2013-2025 Junegunn Choi
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
@@ -21,34 +21,48 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
THE SOFTWARE.
|
THE SOFTWARE.
|
||||||
..
|
..
|
||||||
.TH fzf-tmux 1 "Jan 2017" "fzf 0.16.3" "fzf-tmux - open fzf in tmux split pane"
|
.TH fzf\-tmux 1 "Oct 2025" "fzf 0.66.0" "fzf\-tmux - open fzf in tmux split pane"
|
||||||
|
|
||||||
.SH NAME
|
.SH NAME
|
||||||
fzf-tmux - open fzf in tmux split pane
|
fzf\-tmux - open fzf in tmux split pane
|
||||||
|
|
||||||
.SH SYNOPSIS
|
.SH SYNOPSIS
|
||||||
.B fzf-tmux [-u|-d [HEIGHT[%]]] [-l|-r [WIDTH[%]]] [--] [FZF OPTIONS]
|
.B fzf\-tmux [\fILAYOUT OPTIONS\fR] [\-\-] [\fIFZF OPTIONS\fR]
|
||||||
|
|
||||||
.SH DESCRIPTION
|
.SH DESCRIPTION
|
||||||
fzf-tmux is a wrapper script for fzf that opens fzf in a tmux split pane. It is
|
fzf\-tmux is a wrapper script for fzf that opens fzf in a tmux split pane or in
|
||||||
designed to work just like fzf except that it does not take up the whole
|
a tmux popup window. It is designed to work just like fzf except that it does
|
||||||
screen. You can safely use fzf-tmux instead of fzf in your scripts as the extra
|
not take up the whole screen. You can safely use fzf\-tmux instead of fzf in
|
||||||
options will be silently ignored if you're not on tmux.
|
your scripts as the extra options will be silently ignored if you're not on
|
||||||
|
tmux.
|
||||||
|
|
||||||
.SH OPTIONS
|
.SH LAYOUT OPTIONS
|
||||||
.SS Layout
|
|
||||||
|
|
||||||
(default: \fB-d 50%\fR)
|
(default layout: \fB\-d 50%\fR)
|
||||||
|
|
||||||
|
.SS Popup window
|
||||||
|
(requires tmux 3.2 or above)
|
||||||
.TP
|
.TP
|
||||||
.B "-u [height[%]]"
|
.B "\-p [WIDTH[%][,HEIGHT[%]]]"
|
||||||
|
.TP
|
||||||
|
.B "\-w WIDTH[%]"
|
||||||
|
.TP
|
||||||
|
.B "\-h WIDTH[%]"
|
||||||
|
.TP
|
||||||
|
.B "\-x COL"
|
||||||
|
.TP
|
||||||
|
.B "\-y ROW"
|
||||||
|
|
||||||
|
.SS Split pane
|
||||||
|
.TP
|
||||||
|
.B "\-u [height[%]]"
|
||||||
Split above (up)
|
Split above (up)
|
||||||
.TP
|
.TP
|
||||||
.B "-d [height[%]]"
|
.B "\-d [height[%]]"
|
||||||
Split below (down)
|
Split below (down)
|
||||||
.TP
|
.TP
|
||||||
.B "-l [width[%]]"
|
.B "\-l [width[%]]"
|
||||||
Split left
|
Split left
|
||||||
.TP
|
.TP
|
||||||
.B "-r [width[%]]"
|
.B "\-r [width[%]]"
|
||||||
Split right
|
Split right
|
||||||
|
|||||||
2216  man/man1/fzf.1  (file diff suppressed because it is too large)
853  plugin/fzf.vim  (file diff suppressed because it is too large)
37  shell/common.sh  Normal file
@@ -0,0 +1,37 @@
__fzf_defaults() {
  # $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
  # $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
  printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
  command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
  printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
}

__fzf_exec_awk() {
  # This function performs `exec awk "$@"` safely by working around awk
  # compatibility issues.
  #
  # To reduce an extra fork, this function performs "exec" so is expected to be
  # run as the last command in a subshell.
  if [[ -z ${__fzf_awk-} ]]; then
    __fzf_awk=awk
    if [[ $OSTYPE == solaris* && -x /usr/xpg4/bin/awk ]]; then
      # Note: Solaris awk at /usr/bin/awk is meant for backward compatibility
      # with an ancient implementation of 1977 awk in the original UNIX. It
      # lacks many features of POSIX awk, so it is essentially useless in the
      # modern point of view. To use a standard-conforming version in Solaris,
      # one needs to explicitly use /usr/xpg4/bin/awk.
      __fzf_awk=/usr/xpg4/bin/awk
    elif command -v mawk > /dev/null 2>&1; then
      # choose the faster mawk if: it's installed && build date >= 20230322 &&
      # version >= 1.3.4
      local n x y z d
      IFS=' .' read -r n x y z d <<< $(command mawk -W version 2> /dev/null)
      [[ $n == mawk ]] && ((d >= 20230302 && (x * 1000 + y) * 1000 + z >= 1003004)) && __fzf_awk=mawk
    fi
  fi
  # Note: macOS awk has a quirk that it stops processing at all when it sees
  # any data not following UTF-8 in the input stream when the current LC_CTYPE
  # specifies the UTF-8 encoding. To work around this quirk, one needs to
  # specify LC_ALL=C to change the current encoding to the plain one.
  LC_ALL=C exec "$__fzf_awk" "$@"
}
@@ -1,111 +1,265 @@
|
|||||||
#!/bin/bash
|
|
||||||
# ____ ____
|
# ____ ____
|
||||||
# / __/___ / __/
|
# / __/___ / __/
|
||||||
# / /_/_ / / /_
|
# / /_/_ / / /_
|
||||||
# / __/ / /_/ __/
|
# / __/ / /_/ __/
|
||||||
# /_/ /___/_/-completion.bash
|
# /_/ /___/_/ completion.bash
|
||||||
#
|
#
|
||||||
# - $FZF_TMUX (default: 0)
|
# - $FZF_TMUX (default: 0)
|
||||||
# - $FZF_TMUX_HEIGHT (default: '40%')
|
# - $FZF_TMUX_OPTS (default: empty)
|
||||||
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
||||||
# - $FZF_COMPLETION_OPTS (default: empty)
|
# - $FZF_COMPLETION_OPTS (default: empty)
|
||||||
|
# - $FZF_COMPLETION_PATH_OPTS (default: empty)
|
||||||
|
# - $FZF_COMPLETION_DIR_OPTS (default: empty)
|
||||||
|
|
||||||
|
if [[ $- =~ i ]]; then
|
||||||
|
|
||||||
|
|
||||||
# To use custom commands instead of find, override _fzf_compgen_{path,dir}
|
# To use custom commands instead of find, override _fzf_compgen_{path,dir}
|
||||||
if ! declare -f _fzf_compgen_path > /dev/null; then
|
#
|
||||||
_fzf_compgen_path() {
|
# _fzf_compgen_path() {
|
||||||
echo "$1"
|
# echo "$1"
|
||||||
command find -L "$1" \
|
# command find -L "$1" \
|
||||||
-name .git -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
||||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
# -a -not -path "$1" -print 2> /dev/null | command sed 's@^\./@@'
|
||||||
}
|
# }
|
||||||
fi
|
#
|
||||||
|
# _fzf_compgen_dir() {
|
||||||
if ! declare -f _fzf_compgen_dir > /dev/null; then
|
# command find -L "$1" \
|
||||||
_fzf_compgen_dir() {
|
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o -type d \
|
||||||
command find -L "$1" \
|
# -a -not -path "$1" -print 2> /dev/null | command sed 's@^\./@@'
|
||||||
-name .git -prune -o -name .svn -prune -o -type d \
|
# }
|
||||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
|
||||||
}
|
|
||||||
fi
|
|
||||||
|
|
||||||
###########################################################
|
###########################################################
|
||||||
|
|
||||||
# To redraw line after fzf closes (printf '\e[5n')
|
#----BEGIN shfmt
|
||||||
bind '"\e[0n": redraw-current-line'
|
#----BEGIN INCLUDE common.sh
|
||||||
|
# NOTE: Do not directly edit this section, which is copied from "common.sh".
|
||||||
|
# To modify it, one can edit "common.sh" and run "./update.sh" to apply
|
||||||
|
# the changes. See code comments in "common.sh" for the implementation details.
|
||||||
|
|
||||||
__fzfcmd_complete() {
|
__fzf_defaults() {
|
||||||
[ -n "$TMUX_PANE" ] && [ "${FZF_TMUX:-0}" != 0 ] && [ ${LINES:-40} -gt 15 ] &&
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
echo "fzf-tmux -d${FZF_TMUX_HEIGHT:-40%}" || echo "fzf"
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_orig_completion_filter() {
|
__fzf_exec_awk() {
|
||||||
sed 's/^\(.*-F\) *\([^ ]*\).* \([^ ]*\)$/export _fzf_orig_completion_\3="\1 %s \3 #\2";/' |
|
if [[ -z ${__fzf_awk-} ]]; then
|
||||||
awk -F= '{gsub(/[^A-Za-z0-9_= ;]/, "_", $1); print $1"="$2}'
|
__fzf_awk=awk
|
||||||
|
if [[ $OSTYPE == solaris* && -x /usr/xpg4/bin/awk ]]; then
|
||||||
|
__fzf_awk=/usr/xpg4/bin/awk
|
||||||
|
elif command -v mawk > /dev/null 2>&1; then
|
||||||
|
local n x y z d
|
||||||
|
IFS=' .' read -r n x y z d <<< $(command mawk -W version 2> /dev/null)
|
||||||
|
[[ $n == mawk ]] && ((d >= 20230302 && (x * 1000 + y) * 1000 + z >= 1003004)) && __fzf_awk=mawk
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
LC_ALL=C exec "$__fzf_awk" "$@"
|
||||||
|
}
|
||||||
|
#----END INCLUDE
|
||||||
|
|
||||||
|
__fzf_comprun() {
|
||||||
|
if [[ "$(type -t _fzf_comprun 2>&1)" == function ]]; then
|
||||||
|
_fzf_comprun "$@"
|
||||||
|
elif [[ -n ${TMUX_PANE-} ]] && { [[ ${FZF_TMUX:-0} != 0 ]] || [[ -n ${FZF_TMUX_OPTS-} ]]; }; then
|
||||||
|
shift
|
||||||
|
fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- "$@"
|
||||||
|
else
|
||||||
|
shift
|
||||||
|
fzf "$@"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
__fzf_orig_completion() {
|
||||||
|
local l comp f cmd
|
||||||
|
while read -r l; do
|
||||||
|
if [[ $l =~ ^(.*\ -F)\ *([^ ]*).*\ ([^ ]*)$ ]]; then
|
||||||
|
comp="${BASH_REMATCH[1]}"
|
||||||
|
f="${BASH_REMATCH[2]}"
|
||||||
|
cmd="${BASH_REMATCH[3]}"
|
||||||
|
[[ $f == _fzf_* ]] && continue
|
||||||
|
printf -v "_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}" "%s" "${comp} %s ${cmd} #${f}"
|
||||||
|
if [[ $l == *" -o nospace "* ]] && [[ ${__fzf_nospace_commands-} != *" $cmd "* ]]; then
|
||||||
|
__fzf_nospace_commands="${__fzf_nospace_commands-} $cmd "
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
# @param $1 cmd - Command name for which the original completion is searched
|
||||||
|
# @var[out] REPLY - Original function name is returned
|
||||||
|
__fzf_orig_completion_get_orig_func() {
|
||||||
|
local cmd orig_var orig
|
||||||
|
cmd=$1
|
||||||
|
orig_var="_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}"
|
||||||
|
orig="${!orig_var-}"
|
||||||
|
REPLY="${orig##*#}"
|
||||||
|
[[ $REPLY ]] && type "$REPLY" &> /dev/null
|
||||||
|
}
|
||||||
|
|
||||||
|
# @param $1 cmd - Command name for which the original completion is searched
|
||||||
|
# @param $2 func - Fzf's completion function to replace the original function
|
||||||
|
# @var[out] REPLY - Completion setting is returned as a string to "eval"
|
||||||
|
__fzf_orig_completion_instantiate() {
|
||||||
|
local cmd func orig_var orig
|
||||||
|
cmd=$1
|
||||||
|
func=$2
|
||||||
|
orig_var="_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}"
|
||||||
|
orig="${!orig_var-}"
|
||||||
|
orig="${orig%#*}"
|
||||||
|
[[ $orig == *' %s '* ]] || return 1
|
||||||
|
printf -v REPLY "$orig" "$func"
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_opts_completion() {
|
_fzf_opts_completion() {
|
||||||
local cur prev opts
|
local cur prev opts
|
||||||
COMPREPLY=()
|
COMPREPLY=()
|
||||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
prev="${COMP_WORDS[COMP_CWORD - 1]}"
|
||||||
opts="
|
opts="
|
||||||
-x --extended
|
+c --no-color
|
||||||
-e --exact
|
+i --no-ignore-case
|
||||||
--algo
|
|
||||||
-i +i
|
|
||||||
-n --nth
|
|
||||||
--with-nth
|
|
||||||
-d --delimiter
|
|
||||||
+s --no-sort
|
+s --no-sort
|
||||||
--tac
|
+x --no-extended
|
||||||
--tiebreak
|
|
||||||
-m --multi
|
|
||||||
--no-mouse
|
|
||||||
--bind
|
|
||||||
--cycle
|
|
||||||
--no-hscroll
|
|
||||||
--jump-labels
|
|
||||||
--height
|
|
||||||
--literal
|
|
||||||
--reverse
|
|
||||||
--margin
|
|
||||||
--inline-info
|
|
||||||
--prompt
|
|
||||||
--header
|
|
||||||
--header-lines
|
|
||||||
--ansi
|
--ansi
|
||||||
--tabstop
|
--bash
|
||||||
|
--bind
|
||||||
|
--border
|
||||||
|
--border-label
|
||||||
|
--border-label-pos
|
||||||
--color
|
--color
|
||||||
--no-bold
|
--cycle
|
||||||
|
--disabled
|
||||||
|
--ellipsis
|
||||||
|
--expect
|
||||||
|
--filepath-word
|
||||||
|
--fish
|
||||||
|
--header
|
||||||
|
--header-first
|
||||||
|
--header-lines
|
||||||
|
--height
|
||||||
|
--highlight-line
|
||||||
--history
|
--history
|
||||||
--history-size
|
--history-size
|
||||||
|
--hscroll-off
|
||||||
|
--info
|
||||||
|
--jump-labels
|
||||||
|
--keep-right
|
||||||
|
--layout
|
||||||
|
--listen
|
||||||
|
--listen-unsafe
|
||||||
|
--literal
|
||||||
|
--man
|
||||||
|
--margin
|
||||||
|
--marker
|
||||||
|
--min-height
|
||||||
|
--no-bold
|
||||||
|
--no-clear
|
||||||
|
--no-hscroll
|
||||||
|
--no-mouse
|
||||||
|
--no-scrollbar
|
||||||
|
--no-separator
|
||||||
|
--no-unicode
|
||||||
|
--padding
|
||||||
|
--pointer
|
||||||
--preview
|
--preview
|
||||||
|
--preview-label
|
||||||
|
--preview-label-pos
|
||||||
--preview-window
|
--preview-window
|
||||||
-q --query
|
|
||||||
-1 --select-1
|
|
||||||
-0 --exit-0
|
|
||||||
-f --filter
|
|
||||||
--print-query
|
--print-query
|
||||||
--expect
|
--print0
|
||||||
--sync"
|
--prompt
|
||||||
|
--read0
|
||||||
|
--reverse
|
||||||
|
--scheme
|
||||||
|
--scroll-off
|
||||||
|
--separator
|
||||||
|
--sync
|
||||||
|
--tabstop
|
||||||
|
--tac
|
||||||
|
--tiebreak
|
||||||
|
--tmux
|
||||||
|
--track
|
||||||
|
--version
|
||||||
|
--with-nth
|
||||||
|
--with-shell
|
||||||
|
--wrap
|
||||||
|
--zsh
|
||||||
|
-0 --exit-0
|
||||||
|
-1 --select-1
|
||||||
|
-d --delimiter
|
||||||
|
-e --exact
|
||||||
|
-f --filter
|
||||||
|
-h --help
|
||||||
|
-i --ignore-case
|
||||||
|
-m --multi
|
||||||
|
-n --nth
|
||||||
|
-q --query
|
||||||
|
--"
|
||||||
|
|
||||||
case "${prev}" in
|
case "${prev}" in
|
||||||
--tiebreak)
|
--scheme)
|
||||||
COMPREPLY=( $(compgen -W "length begin end index" -- "$cur") )
|
COMPREPLY=($(compgen -W "default path history" -- "$cur"))
|
||||||
return 0
|
return 0
|
||||||
;;
|
;;
|
||||||
--color)
|
--tiebreak)
|
||||||
COMPREPLY=( $(compgen -W "dark light 16 bw" -- "$cur") )
|
COMPREPLY=($(compgen -W "length chunk begin end index" -- "$cur"))
|
||||||
return 0
|
return 0
|
||||||
;;
|
;;
|
||||||
--history)
|
--color)
|
||||||
COMPREPLY=()
|
COMPREPLY=($(compgen -W "dark light 16 bw no" -- "$cur"))
|
||||||
return 0
|
return 0
|
||||||
;;
|
;;
|
||||||
|
--layout)
|
||||||
|
COMPREPLY=($(compgen -W "default reverse reverse-list" -- "$cur"))
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
|
--info)
|
||||||
|
COMPREPLY=($(compgen -W "default right hidden inline inline-right" -- "$cur"))
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
|
--preview-window)
|
||||||
|
COMPREPLY=($(compgen -W "
|
||||||
|
default
|
||||||
|
hidden
|
||||||
|
nohidden
|
||||||
|
wrap
|
||||||
|
nowrap
|
||||||
|
cycle
|
||||||
|
nocycle
|
||||||
|
up top
|
||||||
|
down bottom
|
||||||
|
left
|
||||||
|
right
|
||||||
|
rounded border border-rounded
|
||||||
|
sharp border-sharp
|
||||||
|
border-bold
|
||||||
|
border-block
|
||||||
|
border-thinblock
|
||||||
|
border-double
|
||||||
|
noborder border-none
|
||||||
|
border-horizontal
|
||||||
|
border-vertical
|
||||||
|
border-up border-top
|
||||||
|
border-down border-bottom
|
||||||
|
border-left
|
||||||
|
border-right
|
||||||
|
follow
|
||||||
|
nofollow" -- "$cur"))
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
|
--border)
|
||||||
|
COMPREPLY=($(compgen -W "rounded sharp bold block thinblock double horizontal vertical top bottom left right none" -- "$cur"))
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
|
--border-label-pos | --preview-label-pos)
|
||||||
|
COMPREPLY=($(compgen -W "center bottom top" -- "$cur"))
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
if [[ "$cur" =~ ^-|\+ ]]; then
|
if [[ $cur =~ ^-|\+ ]]; then
|
||||||
COMPREPLY=( $(compgen -W "${opts}" -- "$cur") )
|
COMPREPLY=($(compgen -W "${opts}" -- "$cur"))
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -113,55 +267,90 @@ _fzf_opts_completion() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
_fzf_handle_dynamic_completion() {
|
_fzf_handle_dynamic_completion() {
|
||||||
local cmd orig_var orig ret orig_cmd
|
local cmd ret REPLY orig_cmd orig_complete
|
||||||
cmd="$1"
|
cmd="$1"
|
||||||
shift
|
shift
|
||||||
orig_cmd="$1"
|
orig_cmd="$1"
|
||||||
orig_var="_fzf_orig_completion_$cmd"
|
if __fzf_orig_completion_get_orig_func "$cmd"; then
|
||||||
orig="${!orig_var##*#}"
|
"$REPLY" "$@"
|
||||||
if [ -n "$orig" ] && type "$orig" > /dev/null 2>&1; then
|
elif [[ -n ${_fzf_completion_loader-} ]]; then
|
||||||
$orig "$@"
|
orig_complete=$(complete -p "$orig_cmd" 2> /dev/null)
|
||||||
elif [ -n "$_fzf_completion_loader" ]; then
|
$_fzf_completion_loader "$@"
|
||||||
_completion_loader "$@"
|
|
||||||
ret=$?
|
ret=$?
|
||||||
eval "$(complete | command grep "\-F.* $orig_cmd$" | _fzf_orig_completion_filter)"
|
# _completion_loader may not have updated completion for the command
|
||||||
source "${BASH_SOURCE[0]}"
|
if [[ "$(complete -p "$orig_cmd" 2> /dev/null)" != "$orig_complete" ]]; then
|
||||||
|
__fzf_orig_completion < <(complete -p "$orig_cmd" 2> /dev/null)
|
||||||
|
__fzf_orig_completion_get_orig_func "$cmd" || ret=1
|
||||||
|
|
||||||
|
# Update orig_complete by _fzf_orig_completion entry
|
||||||
|
[[ $orig_complete =~ ' -F '(_fzf_[^ ]+)' ' ]] &&
|
||||||
|
__fzf_orig_completion_instantiate "$cmd" "${BASH_REMATCH[1]}" &&
|
||||||
|
orig_complete=$REPLY
|
||||||
|
|
||||||
|
if [[ ${__fzf_nospace_commands-} == *" $orig_cmd "* ]]; then
|
||||||
|
eval "${orig_complete/ -F / -o nospace -F }"
|
||||||
|
else
|
||||||
|
eval "$orig_complete"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
[[ $ret -eq 0 ]] && return 124
|
||||||
return $ret
|
return $ret
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
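The `return 124` near the end of the new `_fzf_handle_dynamic_completion` relies on bash's programmable-completion retry protocol: when a completion function returns 124, readline discards the current attempt and restarts completion, picking up whatever spec `complete` registered in the meantime. A minimal standalone sketch of that protocol, using hypothetical names and assuming bash 4+:

# Lazily install a completion spec for a command the first time TAB is pressed,
# then return 124 so bash retries with the newly registered spec.
__lazy_default_completion() {
  local cmd=$1
  complete -W "start stop status" "$cmd"  # stand-in for loading the real spec
  return 124                              # ask bash to restart completion
}
complete -D -F __lazy_default_completion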
|
|
||||||
__fzf_generic_path_completion() {
|
__fzf_generic_path_completion() {
|
||||||
local cur base dir leftover matches trigger cmd fzf
|
local cur base dir leftover matches trigger cmd
|
||||||
fzf="$(__fzfcmd_complete)"
|
cmd="${COMP_WORDS[0]}"
|
||||||
cmd="${COMP_WORDS[0]//[^A-Za-z0-9_=]/_}"
|
if [[ $cmd == \\* ]]; then
|
||||||
|
cmd="${cmd:1}"
|
||||||
|
fi
|
||||||
COMPREPLY=()
|
COMPREPLY=()
|
||||||
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
||||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
[[ $COMP_CWORD -ge 0 ]] && cur="${COMP_WORDS[COMP_CWORD]}"
|
||||||
if [[ "$cur" == *"$trigger" ]]; then
|
if [[ $cur == *"$trigger" ]] && [[ $cur != *'$('* ]] && [[ $cur != *':='* ]] && [[ $cur != *'`'* ]]; then
|
||||||
base=${cur:0:${#cur}-${#trigger}}
|
base=${cur:0:${#cur}-${#trigger}}
|
||||||
eval "base=$base"
|
eval "base=$base" 2> /dev/null || return
|
||||||
|
|
||||||
dir="$base"
|
dir=
|
||||||
|
[[ $base == *"/"* ]] && dir="$base"
|
||||||
while true; do
|
while true; do
|
||||||
if [ -z "$dir" ] || [ -d "$dir" ]; then
|
if [[ -z $dir ]] || [[ -d $dir ]]; then
|
||||||
leftover=${base/#"$dir"}
|
leftover=${base/#"$dir"/}
|
||||||
leftover=${leftover/#\/}
|
leftover=${leftover/#\//}
|
||||||
[ -z "$dir" ] && dir='.'
|
[[ -z $dir ]] && dir='.'
|
||||||
[ "$dir" != "/" ] && dir="${dir/%\//}"
|
[[ $dir != "/" ]] && dir="${dir/%\//}"
|
||||||
matches=$(eval "$1 $(printf %q "$dir")" | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_COMPLETION_OPTS" $fzf $2 -q "$leftover" | while read -r item; do
|
matches=$(
|
||||||
printf "%q$3 " "$item"
|
export FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --scheme=path" "${FZF_COMPLETION_OPTS-} $2")
|
||||||
done)
|
unset FZF_DEFAULT_COMMAND FZF_DEFAULT_OPTS_FILE
|
||||||
|
if declare -F "$1" > /dev/null; then
|
||||||
|
eval "$1 $(printf %q "$dir")" | __fzf_comprun "$4" -q "$leftover"
|
||||||
|
else
|
||||||
|
if [[ $1 =~ dir ]]; then
|
||||||
|
walker=dir,follow
|
||||||
|
eval "rest=(${FZF_COMPLETION_DIR_OPTS-})"
|
||||||
|
else
|
||||||
|
walker=file,dir,follow,hidden
|
||||||
|
eval "rest=(${FZF_COMPLETION_PATH_OPTS-})"
|
||||||
|
fi
|
||||||
|
__fzf_comprun "$4" -q "$leftover" --walker "$walker" --walker-root="$dir" "${rest[@]}"
|
||||||
|
fi | while read -r item; do
|
||||||
|
printf "%q " "${item%$3}$3"
|
||||||
|
done
|
||||||
|
)
|
||||||
matches=${matches% }
|
matches=${matches% }
|
||||||
if [ -n "$matches" ]; then
|
[[ -z $3 ]] && [[ ${__fzf_nospace_commands-} == *" ${COMP_WORDS[0]} "* ]] && matches="$matches "
|
||||||
COMPREPLY=( "$matches" )
|
if [[ -n $matches ]]; then
|
||||||
|
COMPREPLY=("$matches")
|
||||||
else
|
else
|
||||||
COMPREPLY=( "$cur" )
|
COMPREPLY=("$cur")
|
||||||
fi
|
fi
|
||||||
|
# To redraw line after fzf closes (printf '\e[5n')
|
||||||
|
bind '"\e[0n": redraw-current-line' 2> /dev/null
|
||||||
printf '\e[5n'
|
printf '\e[5n'
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
dir=$(dirname "$dir")
|
dir=$(command dirname "$dir")
|
||||||
[[ "$dir" =~ /$ ]] || dir="$dir"/
|
[[ $dir =~ /$ ]] || dir="$dir"/
|
||||||
done
|
done
|
||||||
else
|
else
|
||||||
shift
|
shift
|
||||||
@@ -172,28 +361,53 @@ __fzf_generic_path_completion() {
|
|||||||
}
|
}
|
||||||
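When no custom `_fzf_compgen_path`/`_fzf_compgen_dir` function is defined, the rewritten `__fzf_generic_path_completion` falls back to fzf's built-in walker and splices `FZF_COMPLETION_PATH_OPTS` or `FZF_COMPLETION_DIR_OPTS` into the invocation. A hedged example of what a user might export before sourcing the script (the skip list is only an illustration):

# Extra fzf flags applied only to fuzzy path completion (files and directories) ...
export FZF_COMPLETION_PATH_OPTS="--walker-skip .git,node_modules,target"
# ... and only to fuzzy directory completion, e.g. cd **<TAB>
export FZF_COMPLETION_DIR_OPTS="--walker-skip .git,node_modules,target"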
|
|
||||||
_fzf_complete() {
|
_fzf_complete() {
|
||||||
local cur selected trigger cmd fzf post
|
# Split arguments around --
|
||||||
post="$(caller 0 | awk '{print $2}')_post"
|
local args rest str_arg i sep
|
||||||
type -t "$post" > /dev/null 2>&1 || post=cat
|
args=("$@")
|
||||||
fzf="$(__fzfcmd_complete)"
|
sep=
|
||||||
|
for i in "${!args[@]}"; do
|
||||||
|
if [[ ${args[$i]} == -- ]]; then
|
||||||
|
sep=$i
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
if [[ -n $sep ]]; then
|
||||||
|
str_arg=
|
||||||
|
rest=("${args[@]:$((sep + 1)):${#args[@]}}")
|
||||||
|
args=("${args[@]:0:sep}")
|
||||||
|
else
|
||||||
|
str_arg=$1
|
||||||
|
args=()
|
||||||
|
shift
|
||||||
|
rest=("$@")
|
||||||
|
fi
|
||||||
|
|
||||||
|
local cur selected trigger cmd post
|
||||||
|
post="$(caller 0 | __fzf_exec_awk '{print $2}')_post"
|
||||||
|
type -t "$post" > /dev/null 2>&1 || post='command cat'
|
||||||
|
|
||||||
cmd="${COMP_WORDS[0]//[^A-Za-z0-9_=]/_}"
|
|
||||||
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
||||||
|
cmd="${COMP_WORDS[0]}"
|
||||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||||
if [[ "$cur" == *"$trigger" ]]; then
|
if [[ $cur == *"$trigger" ]] && [[ $cur != *'$('* ]] && [[ $cur != *':='* ]] && [[ $cur != *'`'* ]]; then
|
||||||
cur=${cur:0:${#cur}-${#trigger}}
|
cur=${cur:0:${#cur}-${#trigger}}
|
||||||
|
|
||||||
selected=$(cat | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_COMPLETION_OPTS" $fzf $1 -q "$cur" | $post | tr '\n' ' ')
|
selected=$(
|
||||||
|
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse" "${FZF_COMPLETION_OPTS-} $str_arg") \
|
||||||
|
FZF_DEFAULT_OPTS_FILE='' \
|
||||||
|
__fzf_comprun "${rest[0]}" "${args[@]}" -q "$cur" | eval "$post" | command tr '\n' ' '
|
||||||
|
)
|
||||||
selected=${selected% } # Strip trailing space not to repeat "-o nospace"
|
selected=${selected% } # Strip trailing space not to repeat "-o nospace"
|
||||||
printf '\e[5n'
|
if [[ -n $selected ]]; then
|
||||||
|
|
||||||
if [ -n "$selected" ]; then
|
|
||||||
COMPREPLY=("$selected")
|
COMPREPLY=("$selected")
|
||||||
return 0
|
else
|
||||||
|
COMPREPLY=("$cur")
|
||||||
fi
|
fi
|
||||||
|
bind '"\e[0n": redraw-current-line' 2> /dev/null
|
||||||
|
printf '\e[5n'
|
||||||
|
return 0
|
||||||
else
|
else
|
||||||
shift
|
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
||||||
_fzf_handle_dynamic_completion "$cmd" "$@"
|
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
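The new `_fzf_complete` calling convention is: fzf options first, `--`, then the original completion arguments, with candidates read from standard input and an optional `<function>_post` filter applied to the selection. A sketch of a user-defined fuzzy completion built on that convention (the `doge` command and its candidate list are purely illustrative):

_fzf_complete_doge() {
  _fzf_complete --multi --reverse --prompt="doge> " -- "$@" < <(
    echo very
    echo wow
    echo such
    echo doge
  )
}
complete -F _fzf_complete_doge -o default -o bashdefault doge

# Optional post-processor; it receives the raw selection on stdin.
_fzf_complete_doge_post() {
  command awk '{print $1}'
}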
|
|
||||||
@@ -211,114 +425,266 @@ _fzf_dir_completion() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
_fzf_complete_kill() {
|
_fzf_complete_kill() {
|
||||||
[ -n "${COMP_WORDS[COMP_CWORD]}" ] && return 1
|
_fzf_proc_completion "$@"
|
||||||
|
|
||||||
local selected fzf
|
|
||||||
fzf="$(__fzfcmd_complete)"
|
|
||||||
selected=$(ps -ef | sed 1d | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-50%} --min-height 15 --reverse $FZF_DEFAULT_OPTS --preview 'echo {}' --preview-window down:3:wrap $FZF_COMPLETION_OPTS" $fzf -m | awk '{print $2}' | tr '\n' ' ')
|
|
||||||
printf '\e[5n'
|
|
||||||
|
|
||||||
if [ -n "$selected" ]; then
|
|
||||||
COMPREPLY=( "$selected" )
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_complete_telnet() {
|
_fzf_proc_completion() {
|
||||||
_fzf_complete '+m' "$@" < <(
|
local transformer
|
||||||
command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0' |
|
transformer='
|
||||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
if [[ $FZF_KEY =~ ctrl|alt|shift ]] && [[ -n $FZF_NTH ]]; then
|
||||||
)
|
nths=( ${FZF_NTH//,/ } )
|
||||||
|
new_nths=()
|
||||||
|
found=0
|
||||||
|
for nth in ${nths[@]}; do
|
||||||
|
if [[ $nth = $FZF_CLICK_HEADER_NTH ]]; then
|
||||||
|
found=1
|
||||||
|
else
|
||||||
|
new_nths+=($nth)
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
[[ $found = 0 ]] && new_nths+=($FZF_CLICK_HEADER_NTH)
|
||||||
|
new_nths=${new_nths[*]}
|
||||||
|
new_nths=${new_nths// /,}
|
||||||
|
echo "change-nth($new_nths)+change-prompt($new_nths> )"
|
||||||
|
else
|
||||||
|
if [[ $FZF_NTH = $FZF_CLICK_HEADER_NTH ]]; then
|
||||||
|
echo "change-nth()+change-prompt(> )"
|
||||||
|
else
|
||||||
|
echo "change-nth($FZF_CLICK_HEADER_NTH)+change-prompt($FZF_CLICK_HEADER_WORD> )"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
'
|
||||||
|
_fzf_complete -m --header-lines=1 --no-preview --wrap --color fg:dim,nth:regular \
|
||||||
|
--bind "click-header:transform:$transformer" -- "$@" < <(
|
||||||
|
command ps -eo user,pid,ppid,start,time,command 2> /dev/null ||
|
||||||
|
command ps -eo user,pid,ppid,time,args 2> /dev/null || # For BusyBox
|
||||||
|
command ps --everyone --full --windows # For cygwin
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
_fzf_proc_completion_post() {
|
||||||
|
__fzf_exec_awk '{print $2}'
|
||||||
|
}
|
||||||
|
|
||||||
|
# To use custom hostname lists, override __fzf_list_hosts.
|
||||||
|
# The function is expected to print hostnames, one per line as well as in the
|
||||||
|
# desired sorting and with any duplicates removed, to standard output.
|
||||||
|
#
|
||||||
|
# e.g.
|
||||||
|
# # Use bash-completion's _known_hosts_real() for getting the list of hosts
|
||||||
|
# __fzf_list_hosts() {
|
||||||
|
# # Set the local attribute for any non-local variable that is set by _known_hosts_real()
|
||||||
|
# local COMPREPLY=()
|
||||||
|
# _known_hosts_real ''
|
||||||
|
# printf '%s\n' "${COMPREPLY[@]}" | command sort -u --version-sort
|
||||||
|
# }
|
||||||
|
if ! declare -F __fzf_list_hosts > /dev/null; then
|
||||||
|
__fzf_list_hosts() {
|
||||||
|
command sort -u \
|
||||||
|
<(
|
||||||
|
# Note: To make the pathname expansion of "~/.ssh/config.d/*" work
|
||||||
|
# properly, we need to adjust the related shell options. We need to
|
||||||
|
# unset "set -f" and "GLOBIGNORE", which disable the pathname expansion
|
||||||
|
# totally or partially. We need to unset "dotglob" and "nocaseglob" to
|
||||||
|
# avoid matching unwanted files. We need to unset "failglob" to avoid
|
||||||
|
# outputting the error messages to the terminal when no matching is
|
||||||
|
# found. We need to set "nullglob" to avoid attempting to read the
|
||||||
|
# literal filename '~/.ssh/config.d/*' when no matching is found.
|
||||||
|
set +f
|
||||||
|
GLOBIGNORE=
|
||||||
|
shopt -u dotglob nocaseglob failglob
|
||||||
|
shopt -s nullglob
|
||||||
|
|
||||||
|
__fzf_exec_awk '
|
||||||
|
# Note: mawk <= 1.3.3-20090705 does not support the POSIX brackets of
|
||||||
|
# the form [[:blank:]], and Ubuntu 18.04 LTS still uses this
|
||||||
|
# 16-year-old mawk unfortunately. We need to use [ \t] instead.
|
||||||
|
match(tolower($0), /^[ \t]*host(name)?[ \t]*[ \t=]/) {
|
||||||
|
$0 = substr($0, RLENGTH + 1) # Remove "Host(name)?=?"
|
||||||
|
sub(/#.*/, "")
|
||||||
|
for (i = 1; i <= NF; i++)
|
||||||
|
if ($i !~ /[*?%]/)
|
||||||
|
print $i
|
||||||
|
}
|
||||||
|
' ~/.ssh/config ~/.ssh/config.d/* /etc/ssh/ssh_config 2> /dev/null
|
||||||
|
) \
|
||||||
|
<(
|
||||||
|
__fzf_exec_awk -F ',' '
|
||||||
|
match($0, /^[][a-zA-Z0-9.,:-]+/) {
|
||||||
|
$0 = substr($0, 1, RLENGTH)
|
||||||
|
gsub(/[][]|:[^,]*/, "")
|
||||||
|
for (i = 1; i <= NF; i++)
|
||||||
|
print $i
|
||||||
|
}
|
||||||
|
' ~/.ssh/known_hosts 2> /dev/null
|
||||||
|
) \
|
||||||
|
<(
|
||||||
|
__fzf_exec_awk '
|
||||||
|
{
|
||||||
|
sub(/#.*/, "")
|
||||||
|
for (i = 2; i <= NF; i++)
|
||||||
|
if ($i != "0.0.0.0")
|
||||||
|
print $i
|
||||||
|
}
|
||||||
|
' /etc/hosts 2> /dev/null
|
||||||
|
)
|
||||||
|
}
|
||||||
|
fi
|
||||||
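Because of the `declare -F` guard above, any `__fzf_list_hosts` defined before this file is sourced wins. Besides the `_known_hosts_real` variant shown in the comment, a minimal override could simply print a curated list (the file path here is hypothetical):

__fzf_list_hosts() {
  command sort -u ~/.config/fzf/hosts.txt 2> /dev/null
}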
|
|
||||||
|
_fzf_host_completion() {
|
||||||
|
_fzf_complete +m -- "$@" < <(__fzf_list_hosts)
|
||||||
|
}
|
||||||
|
|
||||||
|
# Values for $1 $2 $3 are described here
|
||||||
|
# https://www.gnu.org/software/bash/manual/html_node/Programmable-Completion.html
|
||||||
|
# > the first argument ($1) is the name of the command whose arguments are being completed,
|
||||||
|
# > the second argument ($2) is the word being completed,
|
||||||
|
# > and the third argument ($3) is the word preceding the word being completed on the current command line.
|
||||||
_fzf_complete_ssh() {
|
_fzf_complete_ssh() {
|
||||||
_fzf_complete '+m' "$@" < <(
|
case $3 in
|
||||||
cat <(cat ~/.ssh/config /etc/ssh/ssh_config 2> /dev/null | command grep -i '^host' | command grep -v '*') \
|
-i | -F | -E)
|
||||||
<(command grep -oE '^[a-z0-9.,-]+' ~/.ssh/known_hosts | tr ',' '\n' | awk '{ print $1 " " $1 }') \
|
_fzf_path_completion "$@"
|
||||||
<(command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0') |
|
;;
|
||||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
*)
|
||||||
|
local user=
|
||||||
|
[[ $2 =~ '@' ]] && user="${2%%@*}@"
|
||||||
|
_fzf_complete +m -- "$@" < <(__fzf_list_hosts | __fzf_exec_awk -v user="$user" '{print user $0}')
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
_fzf_var_completion() {
|
||||||
|
_fzf_complete -m -- "$@" < <(
|
||||||
|
declare -xp | command sed -En 's|^declare [^ ]+ ([^=]+).*|\1|p'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_complete_unset() {
|
_fzf_alias_completion() {
|
||||||
_fzf_complete '-m' "$@" < <(
|
_fzf_complete -m -- "$@" < <(
|
||||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
alias | command sed -En 's|^alias ([^=]+).*|\1|p'
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
_fzf_complete_export() {
|
|
||||||
_fzf_complete '-m' "$@" < <(
|
|
||||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
_fzf_complete_unalias() {
|
|
||||||
_fzf_complete '-m' "$@" < <(
|
|
||||||
alias | sed 's/=.*//' | sed 's/.* //'
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
# fzf options
|
# fzf options
|
||||||
complete -o default -F _fzf_opts_completion fzf
|
complete -o default -F _fzf_opts_completion fzf
|
||||||
|
# fzf-tmux is a thin fzf wrapper that has only a few more options than fzf
|
||||||
|
# itself. As a quick improvement we take fzf's completion. Adding the few extra
|
||||||
|
# fzf-tmux specific options (like `-w WIDTH`) is left as a future patch.
|
||||||
|
complete -o default -F _fzf_opts_completion fzf-tmux
|
||||||
|
|
||||||
d_cmds="${FZF_COMPLETION_DIR_COMMANDS:-cd pushd rmdir}"
|
# Default path completion
|
||||||
a_cmds="
|
__fzf_default_completion() {
|
||||||
awk cat diff diff3
|
__fzf_generic_path_completion _fzf_compgen_path "-m" "" "$@"
|
||||||
emacs emacsclient ex file ftp g++ gcc gvim head hg java
|
|
||||||
|
# Dynamic completion loader has updated the completion for the command
|
||||||
|
if [[ $? -eq 124 ]]; then
|
||||||
|
# We trigger _fzf_setup_completion so that fuzzy completion for the command
|
||||||
|
# still works. However, loader can update the completion for multiple
|
||||||
|
# commands at once, and fuzzy completion will no longer work for those
|
||||||
|
# other commands. e.g. pytest -> py.test, pytest-2, pytest-3, etc
|
||||||
|
_fzf_setup_completion path "$1"
|
||||||
|
return 124
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Set fuzzy path completion as the default completion for all commands.
|
||||||
|
# We can't set up default completion,
|
||||||
|
# 1. if it's already set up by another script
|
||||||
|
# 2. or if the current version of bash doesn't support -D option
|
||||||
|
complete | command grep -q __fzf_default_completion ||
|
||||||
|
complete | command grep -- '-D$' | command grep -qv _comp_complete_load ||
|
||||||
|
complete -D -F __fzf_default_completion -o default -o bashdefault 2> /dev/null
|
||||||
|
|
||||||
|
d_cmds="${FZF_COMPLETION_DIR_COMMANDS-cd pushd rmdir}"
|
||||||
|
|
||||||
|
# NOTE: $FZF_COMPLETION_PATH_COMMANDS and $FZF_COMPLETION_VAR_COMMANDS are
|
||||||
|
# undocumented and subject to change in the future.
|
||||||
|
#
|
||||||
|
# NOTE: Although we have default completion, we still need to set up completion
|
||||||
|
# for each command in case they already have completion set up by another script.
|
||||||
|
a_cmds="${FZF_COMPLETION_PATH_COMMANDS-"
|
||||||
|
awk bat cat code diff diff3
|
||||||
|
emacs emacsclient ex file ftp g++ gcc gvim head hg hx java
|
||||||
javac ld less more mvim nvim patch perl python ruby
|
javac ld less more mvim nvim patch perl python ruby
|
||||||
sed sftp sort source tail tee uniq vi view vim wc xdg-open
|
sed sftp sort source tail tee uniq vi view vim wc xdg-open
|
||||||
basename bunzip2 bzip2 chmod chown curl cp dirname du
|
basename bunzip2 bzip2 chmod chown curl cp dirname du
|
||||||
find git grep gunzip gzip hg jar
|
find git grep gunzip gzip hg jar
|
||||||
ln ls mv open rm rsync scp
|
ln ls mv open rm rsync scp
|
||||||
svn tar unzip zip"
|
svn tar unzip zip"}"
|
||||||
x_cmds="kill ssh telnet unset unalias export"
|
v_cmds="${FZF_COMPLETION_VAR_COMMANDS-export unset printenv}"
|
||||||
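All three lists use `${VAR-default}` expansion, so they can be overridden from the environment before this file is sourced; as noted above, the PATH and VAR variants are undocumented and subject to change. For example:

export FZF_COMPLETION_DIR_COMMANDS="cd pushd rmdir tree ls"
export FZF_COMPLETION_PATH_COMMANDS="vim nvim code"                 # undocumented
export FZF_COMPLETION_VAR_COMMANDS="export unset printenv declare"  # undocumented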
|
|
||||||
# Preserve existing completion
|
# Preserve existing completion
|
||||||
eval $(complete |
|
__fzf_orig_completion < <(complete -p $d_cmds $a_cmds $v_cmds unalias kill ssh 2> /dev/null)
|
||||||
sed -E '/-F/!d; / _fzf/d; '"/ ($(echo $d_cmds $a_cmds $x_cmds | sed 's/ /|/g; s/+/\\+/g'))$/"'!d' |
|
|
||||||
_fzf_orig_completion_filter)
|
|
||||||
|
|
||||||
if type _completion_loader > /dev/null 2>&1; then
|
if type _comp_load > /dev/null 2>&1; then
|
||||||
_fzf_completion_loader=1
|
# _comp_load was added in bash-completion 2.12 to replace _completion_loader.
|
||||||
|
# We use it without -D option so that it does not use _comp_complete_minimal as the fallback.
|
||||||
|
_fzf_completion_loader=_comp_load
|
||||||
|
elif type __load_completion > /dev/null 2>&1; then
|
||||||
|
# In bash-completion 2.11, _completion_loader internally calls __load_completion
|
||||||
|
# and if it returns a non-zero status, it sets the default 'minimal' completion.
|
||||||
|
_fzf_completion_loader=__load_completion
|
||||||
|
elif type _completion_loader > /dev/null 2>&1; then
|
||||||
|
_fzf_completion_loader=_completion_loader
|
||||||
fi
|
fi
|
||||||
|
|
||||||
_fzf_defc() {
|
__fzf_defc() {
|
||||||
local cmd func opts orig_var orig def
|
local cmd func opts REPLY
|
||||||
cmd="$1"
|
cmd="$1"
|
||||||
func="$2"
|
func="$2"
|
||||||
opts="$3"
|
opts="$3"
|
||||||
orig_var="_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}"
|
if __fzf_orig_completion_instantiate "$cmd" "$func"; then
|
||||||
orig="${!orig_var}"
|
eval "$REPLY"
|
||||||
if [ -n "$orig" ]; then
|
|
||||||
printf -v def "$orig" "$func"
|
|
||||||
eval "$def"
|
|
||||||
else
|
else
|
||||||
complete -F "$func" $opts "$cmd"
|
eval "complete -F \"$func\" $opts \"$cmd\""
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
# Anything
|
# Anything
|
||||||
for cmd in $a_cmds; do
|
for cmd in $a_cmds; do
|
||||||
_fzf_defc "$cmd" _fzf_path_completion "-o default -o bashdefault"
|
__fzf_defc "$cmd" _fzf_path_completion "-o default -o bashdefault"
|
||||||
done
|
done
|
||||||
|
|
||||||
# Directory
|
# Directory
|
||||||
for cmd in $d_cmds; do
|
for cmd in $d_cmds; do
|
||||||
_fzf_defc "$cmd" _fzf_dir_completion "-o nospace -o plusdirs"
|
__fzf_defc "$cmd" _fzf_dir_completion "-o bashdefault -o nospace -o dirnames"
|
||||||
done
|
done
|
||||||
|
|
||||||
unset _fzf_defc
|
# Variables
|
||||||
|
for cmd in $v_cmds; do
|
||||||
|
__fzf_defc "$cmd" _fzf_var_completion "-o default -o nospace -v"
|
||||||
|
done
|
||||||
|
|
||||||
# Kill completion
|
# Aliases
|
||||||
complete -F _fzf_complete_kill -o nospace -o default -o bashdefault kill
|
__fzf_defc unalias _fzf_alias_completion "-a"
|
||||||
|
|
||||||
# Host completion
|
# Processes
|
||||||
complete -F _fzf_complete_ssh -o default -o bashdefault ssh
|
__fzf_defc kill _fzf_proc_completion "-o default -o bashdefault"
|
||||||
complete -F _fzf_complete_telnet -o default -o bashdefault telnet
|
|
||||||
|
|
||||||
# Environment variables / Aliases
|
# ssh
|
||||||
complete -F _fzf_complete_unset -o default -o bashdefault unset
|
__fzf_defc ssh _fzf_complete_ssh "-o default -o bashdefault"
|
||||||
complete -F _fzf_complete_export -o default -o bashdefault export
|
|
||||||
complete -F _fzf_complete_unalias -o default -o bashdefault unalias
|
|
||||||
|
|
||||||
unset cmd d_cmds a_cmds x_cmds
|
unset cmd d_cmds a_cmds v_cmds
|
||||||
|
|
||||||
|
_fzf_setup_completion() {
|
||||||
|
local kind fn cmd
|
||||||
|
kind=$1
|
||||||
|
fn=_fzf_${1}_completion
|
||||||
|
if [[ $# -lt 2 ]] || ! type -t "$fn" > /dev/null; then
|
||||||
|
echo "usage: ${FUNCNAME[0]} path|dir|var|alias|host|proc COMMANDS..."
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
__fzf_orig_completion < <(complete -p "$@" 2> /dev/null)
|
||||||
|
for cmd in "$@"; do
|
||||||
|
case "$kind" in
|
||||||
|
dir) __fzf_defc "$cmd" "$fn" "-o nospace -o dirnames" ;;
|
||||||
|
var) __fzf_defc "$cmd" "$fn" "-o default -o nospace -v" ;;
|
||||||
|
alias) __fzf_defc "$cmd" "$fn" "-a" ;;
|
||||||
|
*) __fzf_defc "$cmd" "$fn" "-o default -o bashdefault" ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
}
|
||||||
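`_fzf_setup_completion` is the hook for attaching one of the completion kinds above (path, dir, var, alias, host, proc) to additional commands after this file has been sourced. Usage sketch; the command names are just examples:

_fzf_setup_completion path ag git kubectl
_fzf_setup_completion dir  tree
_fzf_setup_completion var  printenv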
|
#----END shfmt
|
||||||
|
|
||||||
|
fi
|
||||||
|
|||||||
@@ -1,69 +1,197 @@
|
|||||||
#!/bin/zsh
|
|
||||||
# ____ ____
|
# ____ ____
|
||||||
# / __/___ / __/
|
# / __/___ / __/
|
||||||
# / /_/_ / / /_
|
# / /_/_ / / /_
|
||||||
# / __/ / /_/ __/
|
# / __/ / /_/ __/
|
||||||
# /_/ /___/_/-completion.zsh
|
# /_/ /___/_/ completion.zsh
|
||||||
#
|
#
|
||||||
# - $FZF_TMUX (default: 0)
|
# - $FZF_TMUX (default: 0)
|
||||||
# - $FZF_TMUX_HEIGHT (default: '40%')
|
# - $FZF_TMUX_OPTS (default: empty)
|
||||||
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
# - $FZF_COMPLETION_TRIGGER (default: '**')
|
||||||
# - $FZF_COMPLETION_OPTS (default: empty)
|
# - $FZF_COMPLETION_OPTS (default: empty)
|
||||||
|
# - $FZF_COMPLETION_PATH_OPTS (default: empty)
|
||||||
|
# - $FZF_COMPLETION_DIR_OPTS (default: empty)
|
||||||
|
|
||||||
|
|
||||||
|
# Both branches of the following `if` do the same thing -- define
|
||||||
|
# __fzf_completion_options such that `eval $__fzf_completion_options` sets
|
||||||
|
# all options to the same values they currently have. We'll do just that at
|
||||||
|
# the bottom of the file after changing options to what we prefer.
|
||||||
|
#
|
||||||
|
# IMPORTANT: Until we get to the `emulate` line, all words that *can* be quoted
|
||||||
|
# *must* be quoted in order to prevent alias expansion. In addition, code must
|
||||||
|
# be written in a way that works with any set of zsh options. This is very tricky, so
|
||||||
|
# be careful when you change it.
|
||||||
|
#
|
||||||
|
# Start by loading the builtin zsh/parameter module. It provides `options`
|
||||||
|
# associative array that stores current shell options.
|
||||||
|
if 'zmodload' 'zsh/parameter' 2>'/dev/null' && (( ${+options} )); then
|
||||||
|
# This is the fast branch and it gets taken on virtually all Zsh installations.
|
||||||
|
#
|
||||||
|
# ${(kv)options[@]} expands to array of keys (option names) and values ("on"
|
||||||
|
# or "off"). The subsequent expansion# with (j: :) flag joins all elements
|
||||||
|
# together separated by spaces. __fzf_completion_options ends up with a value
|
||||||
|
# like this: "options=(shwordsplit off aliases on ...)".
|
||||||
|
__fzf_completion_options="options=(${(j: :)${(kv)options[@]}})"
|
||||||
|
else
|
||||||
|
# This branch is much slower because it forks to get the names of all
|
||||||
|
# zsh options. It's possible to eliminate this fork but it's not worth the
|
||||||
|
# trouble because this branch gets taken only on very ancient or broken
|
||||||
|
# zsh installations.
|
||||||
|
() {
|
||||||
|
# That `()` above defines an anonymous function. This is essentially a scope
|
||||||
|
# for local parameters. We use it to avoid polluting global scope.
|
||||||
|
'local' '__fzf_opt'
|
||||||
|
__fzf_completion_options="setopt"
|
||||||
|
# `set -o` prints one line for every zsh option. Each line contains option
|
||||||
|
# name, some spaces, and then either "on" or "off". We just want option names.
|
||||||
|
# Expansion with (@f) flag splits a string into lines. The outer expansion
|
||||||
|
# removes spaces and everything that follow them on every line. __fzf_opt
|
||||||
|
# ends up iterating over option names: shwordsplit, aliases, etc.
|
||||||
|
for __fzf_opt in "${(@)${(@f)$(set -o)}%% *}"; do
|
||||||
|
if [[ -o "$__fzf_opt" ]]; then
|
||||||
|
# Option $__fzf_opt is currently on, so remember to set it back on.
|
||||||
|
__fzf_completion_options+=" -o $__fzf_opt"
|
||||||
|
else
|
||||||
|
# Option $__fzf_opt is currently off, so remember to set it back off.
|
||||||
|
__fzf_completion_options+=" +o $__fzf_opt"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
# The value of __fzf_completion_options here looks like this:
|
||||||
|
# "setopt +o shwordsplit -o aliases ..."
|
||||||
|
}
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Enable the default zsh options (those marked with <Z> in `man zshoptions`)
|
||||||
|
# but without `aliases`. Aliases in functions are expanded when functions are
|
||||||
|
# defined, so if we disable aliases here, we'll be sure to have no pesky
|
||||||
|
# aliases in any of our functions. This way we won't need to prefix every
|
||||||
|
# command with `command` or to quote every word to defend against global
|
||||||
|
# aliases. Note that `aliases` is not the only option that's important to
|
||||||
|
# control. There are several others that could wreak havoc if they are set
|
||||||
|
# to values we don't expect. With the following `emulate` command we
|
||||||
|
# sidestep this issue entirely.
|
||||||
|
'builtin' 'emulate' 'zsh' && 'builtin' 'setopt' 'no_aliases'
|
||||||
|
|
||||||
|
# This brace is the start of try-always block. The `always` part is like
|
||||||
|
# `finally` in lesser languages. We use it to *always* restore user options.
|
||||||
|
{
|
||||||
|
# The 'emulate' command should not be placed inside the interactive if check;
|
||||||
|
# placing it there fails to disable alias expansion. See #3731.
|
||||||
|
if [[ -o interactive ]]; then
|
||||||
|
|
||||||
# To use custom commands instead of find, override _fzf_compgen_{path,dir}
|
# To use custom commands instead of find, override _fzf_compgen_{path,dir}
|
||||||
if ! declare -f _fzf_compgen_path > /dev/null; then
|
#
|
||||||
_fzf_compgen_path() {
|
# _fzf_compgen_path() {
|
||||||
echo "$1"
|
# echo "$1"
|
||||||
command find -L "$1" \
|
# command find -L "$1" \
|
||||||
-name .git -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
|
||||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
# -a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||||
}
|
# }
|
||||||
fi
|
#
|
||||||
|
# _fzf_compgen_dir() {
|
||||||
if ! declare -f _fzf_compgen_dir > /dev/null; then
|
# command find -L "$1" \
|
||||||
_fzf_compgen_dir() {
|
# -name .git -prune -o -name .hg -prune -o -name .svn -prune -o -type d \
|
||||||
command find -L "$1" \
|
# -a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
||||||
-name .git -prune -o -name .svn -prune -o -type d \
|
# }
|
||||||
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
|
|
||||||
}
|
|
||||||
fi
|
|
||||||
|
|
||||||
###########################################################
|
###########################################################
|
||||||
|
|
||||||
__fzfcmd_complete() {
|
#----BEGIN INCLUDE common.sh
|
||||||
[ -n "$TMUX_PANE" ] && [ "${FZF_TMUX:-0}" != 0 ] && [ ${LINES:-40} -gt 15 ] &&
|
# NOTE: Do not directly edit this section, which is copied from "common.sh".
|
||||||
echo "fzf-tmux -d${FZF_TMUX_HEIGHT:-40%}" || echo "fzf"
|
# To modify it, one can edit "common.sh" and run "./update.sh" to apply
|
||||||
|
# the changes. See code comments in "common.sh" for the implementation details.
|
||||||
|
|
||||||
|
__fzf_defaults() {
|
||||||
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
|
}
|
||||||
|
|
||||||
|
__fzf_exec_awk() {
|
||||||
|
if [[ -z ${__fzf_awk-} ]]; then
|
||||||
|
__fzf_awk=awk
|
||||||
|
if [[ $OSTYPE == solaris* && -x /usr/xpg4/bin/awk ]]; then
|
||||||
|
__fzf_awk=/usr/xpg4/bin/awk
|
||||||
|
elif command -v mawk > /dev/null 2>&1; then
|
||||||
|
local n x y z d
|
||||||
|
IFS=' .' read -r n x y z d <<< $(command mawk -W version 2> /dev/null)
|
||||||
|
[[ $n == mawk ]] && ((d >= 20230302 && (x * 1000 + y) * 1000 + z >= 1003004)) && __fzf_awk=mawk
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
LC_ALL=C exec "$__fzf_awk" "$@"
|
||||||
|
}
|
||||||
|
#----END INCLUDE
|
||||||
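`__fzf_defaults` in the include above emits three chunks in increasing order of precedence (fzf parses options left to right, so later ones win): the hard-coded defaults plus its first argument, the contents of `$FZF_DEFAULT_OPTS_FILE` if any, and finally `$FZF_DEFAULT_OPTS` plus its second argument. A hedged illustration with made-up values, assuming the function above is already sourced:

FZF_TMUX_HEIGHT=50% FZF_DEFAULT_OPTS='--color=light' FZF_DEFAULT_OPTS_FILE='' \
  __fzf_defaults "--reverse --scheme=path" "--multi"
# prints:
#   --height 50% --min-height 20+ --bind=ctrl-z:ignore --reverse --scheme=path
#   --color=light --multi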
|
|
||||||
|
__fzf_comprun() {
|
||||||
|
if [[ "$(type _fzf_comprun 2>&1)" =~ function ]]; then
|
||||||
|
_fzf_comprun "$@"
|
||||||
|
elif [ -n "${TMUX_PANE-}" ] && { [ "${FZF_TMUX:-0}" != 0 ] || [ -n "${FZF_TMUX_OPTS-}" ]; }; then
|
||||||
|
shift
|
||||||
|
if [ -n "${FZF_TMUX_OPTS-}" ]; then
|
||||||
|
fzf-tmux ${(Q)${(Z+n+)FZF_TMUX_OPTS}} -- "$@"
|
||||||
|
else
|
||||||
|
fzf-tmux -d ${FZF_TMUX_HEIGHT:-40%} -- "$@"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
shift
|
||||||
|
fzf "$@"
|
||||||
|
fi
|
||||||
|
}
|
||||||
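Because `__fzf_comprun` defers to a user-defined `_fzf_comprun` when one exists, completion behavior can be customized per command: the first argument is the command being completed, the rest are the fzf options chosen by the caller. A sketch of such an override; the preview commands are only examples:

_fzf_comprun() {
  local command=$1
  shift
  case "$command" in
    cd)  fzf --preview 'tree -C {} | head -200' "$@" ;;
    ssh) fzf --preview 'dig {}'                 "$@" ;;
    *)   fzf --preview 'cat {}'                 "$@" ;;
  esac
}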
|
|
||||||
|
# Extract the name of the command. e.g. ls; foo=1 ssh **<tab>
|
||||||
|
__fzf_extract_command() {
|
||||||
|
# Control completion with the "compstate" parameter, insert and list nothing
|
||||||
|
compstate[insert]=
|
||||||
|
compstate[list]=
|
||||||
|
cmd_word="${(Q)words[1]}"
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzf_generic_path_completion() {
|
__fzf_generic_path_completion() {
|
||||||
local base lbuf compgen fzf_opts suffix tail fzf dir leftover matches
|
local base lbuf compgen fzf_opts suffix tail dir leftover matches
|
||||||
# (Q) flag removes a quoting level: "foo\ bar" => "foo bar"
|
base=$1
|
||||||
base=${(Q)1}
|
|
||||||
lbuf=$2
|
lbuf=$2
|
||||||
compgen=$3
|
compgen=$3
|
||||||
fzf_opts=$4
|
fzf_opts=$4
|
||||||
suffix=$5
|
suffix=$5
|
||||||
tail=$6
|
tail=$6
|
||||||
fzf="$(__fzfcmd_complete)"
|
|
||||||
|
|
||||||
setopt localoptions nonomatch
|
setopt localoptions nonomatch
|
||||||
dir="$base"
|
if [[ $base = *'$('* ]] || [[ $base = *'<('* ]] || [[ $base = *'>('* ]] || [[ $base = *':='* ]] || [[ $base = *'`'* ]]; then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
eval "base=$base" 2> /dev/null || return
|
||||||
|
[[ $base = *"/"* ]] && dir="$base"
|
||||||
while [ 1 ]; do
|
while [ 1 ]; do
|
||||||
if [[ -z "$dir" || -d ${~dir} ]]; then
|
if [[ -z "$dir" || -d ${dir} ]]; then
|
||||||
leftover=${base/#"$dir"}
|
leftover=${base/#"$dir"}
|
||||||
leftover=${leftover/#\/}
|
leftover=${leftover/#\/}
|
||||||
[ -z "$dir" ] && dir='.'
|
[ -z "$dir" ] && dir='.'
|
||||||
[ "$dir" != "/" ] && dir="${dir/%\//}"
|
[ "$dir" != "/" ] && dir="${dir/%\//}"
|
||||||
dir=${~dir}
|
matches=$(
|
||||||
matches=$(eval "$compgen $(printf %q "$dir")" | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_COMPLETION_OPTS" ${=fzf} ${=fzf_opts} -q "$leftover" | while read item; do
|
export FZF_DEFAULT_OPTS
|
||||||
echo -n "${(q)item}$suffix "
|
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --scheme=path" "${FZF_COMPLETION_OPTS-}")
|
||||||
done)
|
unset FZF_DEFAULT_COMMAND FZF_DEFAULT_OPTS_FILE
|
||||||
|
if declare -f "$compgen" > /dev/null; then
|
||||||
|
eval "$compgen $(printf %q "$dir")" | __fzf_comprun "$cmd_word" ${(Q)${(Z+n+)fzf_opts}} -q "$leftover"
|
||||||
|
else
|
||||||
|
if [[ $compgen =~ dir ]]; then
|
||||||
|
walker=dir,follow
|
||||||
|
rest=${FZF_COMPLETION_DIR_OPTS-}
|
||||||
|
else
|
||||||
|
walker=file,dir,follow,hidden
|
||||||
|
rest=${FZF_COMPLETION_PATH_OPTS-}
|
||||||
|
fi
|
||||||
|
__fzf_comprun "$cmd_word" ${(Q)${(Z+n+)fzf_opts}} -q "$leftover" --walker "$walker" --walker-root="$dir" ${(Q)${(Z+n+)rest}} < /dev/tty
|
||||||
|
fi | while read -r item; do
|
||||||
|
item="${item%$suffix}$suffix"
|
||||||
|
echo -n -E "${(q)item} "
|
||||||
|
done
|
||||||
|
)
|
||||||
matches=${matches% }
|
matches=${matches% }
|
||||||
if [ -n "$matches" ]; then
|
if [ -n "$matches" ]; then
|
||||||
LBUFFER="$lbuf$matches$tail"
|
LBUFFER="$lbuf$matches$tail"
|
||||||
fi
|
fi
|
||||||
zle redisplay
|
zle reset-prompt
|
||||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
|
||||||
break
|
break
|
||||||
fi
|
fi
|
||||||
dir=$(dirname "$dir")
|
dir=$(dirname "$dir")
|
||||||
@@ -81,69 +209,184 @@ _fzf_dir_completion() {
|
|||||||
"" "/" ""
|
"" "/" ""
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_feed_fifo() (
|
_fzf_feed_fifo() {
|
||||||
command rm -f "$1"
|
command rm -f "$1"
|
||||||
mkfifo "$1"
|
mkfifo "$1"
|
||||||
cat <&0 > "$1" &
|
cat <&0 > "$1" &|
|
||||||
)
|
}
|
||||||
|
|
||||||
_fzf_complete() {
|
_fzf_complete() {
|
||||||
local fifo fzf_opts lbuf fzf matches post
|
setopt localoptions ksh_arrays
|
||||||
|
# Split arguments around --
|
||||||
|
local args rest str_arg i sep
|
||||||
|
args=("$@")
|
||||||
|
sep=
|
||||||
|
for i in {0..${#args[@]}}; do
|
||||||
|
if [[ "${args[$i]-}" = -- ]]; then
|
||||||
|
sep=$i
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
if [[ -n "$sep" ]]; then
|
||||||
|
str_arg=
|
||||||
|
rest=("${args[@]:$((sep + 1)):${#args[@]}}")
|
||||||
|
args=("${args[@]:0:$sep}")
|
||||||
|
else
|
||||||
|
str_arg=$1
|
||||||
|
args=()
|
||||||
|
shift
|
||||||
|
rest=("$@")
|
||||||
|
fi
|
||||||
|
|
||||||
|
local fifo lbuf matches post
|
||||||
fifo="${TMPDIR:-/tmp}/fzf-complete-fifo-$$"
|
fifo="${TMPDIR:-/tmp}/fzf-complete-fifo-$$"
|
||||||
fzf_opts=$1
|
lbuf=${rest[0]}
|
||||||
lbuf=$2
|
post="${funcstack[1]}_post"
|
||||||
post="${funcstack[2]}_post"
|
|
||||||
type $post > /dev/null 2>&1 || post=cat
|
type $post > /dev/null 2>&1 || post=cat
|
||||||
|
|
||||||
fzf="$(__fzfcmd_complete)"
|
|
||||||
|
|
||||||
_fzf_feed_fifo "$fifo"
|
_fzf_feed_fifo "$fifo"
|
||||||
matches=$(cat "$fifo" | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_COMPLETION_OPTS" ${=fzf} ${=fzf_opts} -q "${(Q)prefix}" | $post | tr '\n' ' ')
|
matches=$(
|
||||||
|
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse" "${FZF_COMPLETION_OPTS-} $str_arg") \
|
||||||
|
FZF_DEFAULT_OPTS_FILE='' \
|
||||||
|
__fzf_comprun "$cmd_word" "${args[@]}" -q "${(Q)prefix}" < "$fifo" | $post | tr '\n' ' ')
|
||||||
if [ -n "$matches" ]; then
|
if [ -n "$matches" ]; then
|
||||||
LBUFFER="$lbuf$matches"
|
LBUFFER="$lbuf$matches"
|
||||||
fi
|
fi
|
||||||
zle redisplay
|
|
||||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
|
||||||
command rm -f "$fifo"
|
command rm -f "$fifo"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# To use custom hostname lists, override __fzf_list_hosts.
|
||||||
|
# The function is expected to print hostnames, one per line as well as in the
|
||||||
|
# desired sorting and with any duplicates removed, to standard output.
|
||||||
|
if ! declare -f __fzf_list_hosts > /dev/null; then
|
||||||
|
__fzf_list_hosts() {
|
||||||
|
command sort -u \
|
||||||
|
<(
|
||||||
|
# Note: To make the pathname expansion of "~/.ssh/config.d/*" work
|
||||||
|
# properly, we need to adjust the related shell options. We need to
|
||||||
|
# unset "NO_GLOB" (or reset "GLOB"), which disable the pathname
|
||||||
|
# expansion totally. We need to unset "DOT_GLOB" and set "CASE_GLOB"
|
||||||
|
# to avoid matching unwanted files. We need to set "NULL_GLOB" to
|
||||||
|
# avoid attempting to read the literal filename '~/.ssh/config.d/*'
|
||||||
|
# when no matching is found.
|
||||||
|
setopt GLOB NO_DOT_GLOB CASE_GLOB NO_NOMATCH NULL_GLOB
|
||||||
|
|
||||||
|
__fzf_exec_awk '
|
||||||
|
# Note: mawk <= 1.3.3-20090705 does not support the POSIX brackets of
|
||||||
|
# the form [[:blank:]], and Ubuntu 18.04 LTS still uses this
|
||||||
|
# 16-year-old mawk unfortunately. We need to use [ \t] instead.
|
||||||
|
match(tolower($0), /^[ \t]*host(name)?[ \t]*[ \t=]/) {
|
||||||
|
$0 = substr($0, RLENGTH + 1) # Remove "Host(name)?=?"
|
||||||
|
sub(/#.*/, "")
|
||||||
|
for (i = 1; i <= NF; i++)
|
||||||
|
if ($i !~ /[*?%]/)
|
||||||
|
print $i
|
||||||
|
}
|
||||||
|
' ~/.ssh/config ~/.ssh/config.d/* /etc/ssh/ssh_config 2> /dev/null
|
||||||
|
) \
|
||||||
|
<(
|
||||||
|
__fzf_exec_awk -F ',' '
|
||||||
|
match($0, /^[][a-zA-Z0-9.,:-]+/) {
|
||||||
|
$0 = substr($0, 1, RLENGTH)
|
||||||
|
gsub(/[][]|:[^,]*/, "")
|
||||||
|
for (i = 1; i <= NF; i++)
|
||||||
|
print $i
|
||||||
|
}
|
||||||
|
' ~/.ssh/known_hosts 2> /dev/null
|
||||||
|
) \
|
||||||
|
<(
|
||||||
|
__fzf_exec_awk '
|
||||||
|
{
|
||||||
|
sub(/#.*/, "")
|
||||||
|
for (i = 2; i <= NF; i++)
|
||||||
|
if ($i != "0.0.0.0")
|
||||||
|
print $i
|
||||||
|
}
|
||||||
|
' /etc/hosts 2> /dev/null
|
||||||
|
)
|
||||||
|
}
|
||||||
|
fi
|
||||||
|
|
||||||
_fzf_complete_telnet() {
|
_fzf_complete_telnet() {
|
||||||
_fzf_complete '+m' "$@" < <(
|
_fzf_complete +m -- "$@" < <(__fzf_list_hosts)
|
||||||
command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0' |
|
|
||||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# The first and the only argument is the LBUFFER without the current word that contains the trigger.
|
||||||
|
# The current word without the trigger is in the $prefix variable passed from the caller.
|
||||||
_fzf_complete_ssh() {
|
_fzf_complete_ssh() {
|
||||||
_fzf_complete '+m' "$@" < <(
|
local -a tokens
|
||||||
command cat <(cat ~/.ssh/config /etc/ssh/ssh_config 2> /dev/null | command grep -i '^host' | command grep -v '*') \
|
tokens=(${(z)1})
|
||||||
<(command grep -oE '^[a-z0-9.,-]+' ~/.ssh/known_hosts | tr ',' '\n' | awk '{ print $1 " " $1 }') \
|
case ${tokens[-1]} in
|
||||||
<(command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0') |
|
-i|-F|-E)
|
||||||
awk '{if (length($2) > 0) {print $2}}' | sort -u
|
_fzf_path_completion "$prefix" "$1"
|
||||||
)
|
;;
|
||||||
|
*)
|
||||||
|
local user
|
||||||
|
[[ $prefix =~ @ ]] && user="${prefix%%@*}@"
|
||||||
|
_fzf_complete +m -- "$@" < <(__fzf_list_hosts | __fzf_exec_awk -v user="$user" '{print user $0}')
|
||||||
|
;;
|
||||||
|
esac
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_complete_export() {
|
_fzf_complete_export() {
|
||||||
_fzf_complete '-m' "$@" < <(
|
_fzf_complete -m -- "$@" < <(
|
||||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_complete_unset() {
|
_fzf_complete_unset() {
|
||||||
_fzf_complete '-m' "$@" < <(
|
_fzf_complete -m -- "$@" < <(
|
||||||
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
declare -xp | sed 's/=.*//' | sed 's/.* //'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_complete_unalias() {
|
_fzf_complete_unalias() {
|
||||||
_fzf_complete '+m' "$@" < <(
|
_fzf_complete +m -- "$@" < <(
|
||||||
alias | sed 's/=.*//'
|
alias | sed 's/=.*//'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
_fzf_complete_kill() {
|
||||||
|
local transformer
|
||||||
|
transformer='
|
||||||
|
if [[ $FZF_KEY =~ ctrl|alt|shift ]] && [[ -n $FZF_NTH ]]; then
|
||||||
|
nths=( ${FZF_NTH//,/ } )
|
||||||
|
new_nths=()
|
||||||
|
found=0
|
||||||
|
for nth in ${nths[@]}; do
|
||||||
|
if [[ $nth = $FZF_CLICK_HEADER_NTH ]]; then
|
||||||
|
found=1
|
||||||
|
else
|
||||||
|
new_nths+=($nth)
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
[[ $found = 0 ]] && new_nths+=($FZF_CLICK_HEADER_NTH)
|
||||||
|
new_nths=${new_nths[*]}
|
||||||
|
new_nths=${new_nths// /,}
|
||||||
|
echo "change-nth($new_nths)+change-prompt($new_nths> )"
|
||||||
|
else
|
||||||
|
if [[ $FZF_NTH = $FZF_CLICK_HEADER_NTH ]]; then
|
||||||
|
echo "change-nth()+change-prompt(> )"
|
||||||
|
else
|
||||||
|
echo "change-nth($FZF_CLICK_HEADER_NTH)+change-prompt($FZF_CLICK_HEADER_WORD> )"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
'
|
||||||
|
_fzf_complete -m --header-lines=1 --no-preview --wrap --color fg:dim,nth:regular \
|
||||||
|
--bind "click-header:transform:$transformer" -- "$@" < <(
|
||||||
|
command ps -eo user,pid,ppid,start,time,command 2> /dev/null ||
|
||||||
|
command ps -eo user,pid,ppid,time,args 2> /dev/null || # For BusyBox
|
||||||
|
command ps --everyone --full --windows # For cygwin
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
_fzf_complete_kill_post() {
|
||||||
|
__fzf_exec_awk '{print $2}'
|
||||||
|
}
|
||||||
|
|
||||||
fzf-completion() {
|
fzf-completion() {
|
||||||
local tokens cmd prefix trigger tail fzf matches lbuf d_cmds
|
local tokens prefix trigger tail matches lbuf d_cmds cursor_pos cmd_word
|
||||||
setopt localoptions noshwordsplit noksh_arrays
|
setopt localoptions noshwordsplit noksh_arrays noposixbuiltins
|
||||||
|
|
||||||
# http://zsh.sourceforge.net/FAQ/zshfaq03.html
|
# http://zsh.sourceforge.net/FAQ/zshfaq03.html
|
||||||
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
|
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
|
||||||
@@ -153,32 +396,54 @@ fzf-completion() {
|
|||||||
return
|
return
|
||||||
fi
|
fi
|
||||||
|
|
||||||
cmd=${tokens[1]}
|
|
||||||
|
|
||||||
# Explicitly allow for empty trigger.
|
# Explicitly allow for empty trigger.
|
||||||
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
trigger=${FZF_COMPLETION_TRIGGER-'**'}
|
||||||
[ -z "$trigger" -a ${LBUFFER[-1]} = ' ' ] && tokens+=("")
|
[[ -z $trigger && ${LBUFFER[-1]} == ' ' ]] && tokens+=("")
|
||||||
|
|
||||||
|
# When the trigger starts with ';', it becomes a separate token
|
||||||
|
if [[ ${LBUFFER} = *"${tokens[-2]-}${tokens[-1]}" ]]; then
|
||||||
|
tokens[-2]="${tokens[-2]-}${tokens[-1]}"
|
||||||
|
tokens=(${tokens[0,-2]})
|
||||||
|
fi
|
||||||
|
|
||||||
|
lbuf=$LBUFFER
|
||||||
tail=${LBUFFER:$(( ${#LBUFFER} - ${#trigger} ))}
|
tail=${LBUFFER:$(( ${#LBUFFER} - ${#trigger} ))}
|
||||||
# Kill completion (do not require trigger sequence)
|
|
||||||
if [ $cmd = kill -a ${LBUFFER[-1]} = ' ' ]; then
|
|
||||||
fzf="$(__fzfcmd_complete)"
|
|
||||||
matches=$(ps -ef | sed 1d | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-50%} --min-height 15 --reverse $FZF_DEFAULT_OPTS --preview 'echo {}' --preview-window down:3:wrap $FZF_COMPLETION_OPTS" ${=fzf} -m | awk '{print $2}' | tr '\n' ' ')
|
|
||||||
if [ -n "$matches" ]; then
|
|
||||||
LBUFFER="$LBUFFER$matches"
|
|
||||||
fi
|
|
||||||
zle redisplay
|
|
||||||
typeset -f zle-line-init >/dev/null && zle zle-line-init
|
|
||||||
# Trigger sequence given
|
# Trigger sequence given
|
||||||
elif [ ${#tokens} -gt 1 -a "$tail" = "$trigger" ]; then
|
if [ ${#tokens} -gt 1 -a "$tail" = "$trigger" ]; then
|
||||||
d_cmds=(${=FZF_COMPLETION_DIR_COMMANDS:-cd pushd rmdir})
|
d_cmds=(${=FZF_COMPLETION_DIR_COMMANDS-cd pushd rmdir})
|
||||||
|
|
||||||
|
{
|
||||||
|
cursor_pos=$CURSOR
|
||||||
|
# Move the cursor before the trigger to preserve word array elements when
|
||||||
|
# trigger chars like ';' or '`' would otherwise reset the 'words' array.
|
||||||
|
CURSOR=$((cursor_pos - ${#trigger} - 1))
|
||||||
|
# Check if at least one completion system (old or new) is active.
|
||||||
|
# If at least one user-defined completion widget is detected, nothing will
|
||||||
|
# be completed if neither the old nor the new completion system is enabled.
|
||||||
|
# In such cases, the 'zsh/compctl' module is loaded as a fallback.
|
||||||
|
if ! zmodload -F zsh/parameter p:functions 2>/dev/null || ! (( ${+functions[compdef]} )); then
|
||||||
|
zmodload -F zsh/compctl 2>/dev/null
|
||||||
|
fi
|
||||||
|
# Create a completion widget to access the 'words' array (man zshcompwid)
|
||||||
|
zle -C __fzf_extract_command .complete-word __fzf_extract_command
|
||||||
|
zle __fzf_extract_command
|
||||||
|
} always {
|
||||||
|
CURSOR=$cursor_pos
|
||||||
|
# Delete the completion widget
|
||||||
|
zle -D __fzf_extract_command 2>/dev/null
|
||||||
|
}
|
||||||
|
|
||||||
[ -z "$trigger" ] && prefix=${tokens[-1]} || prefix=${tokens[-1]:0:-${#trigger}}
|
[ -z "$trigger" ] && prefix=${tokens[-1]} || prefix=${tokens[-1]:0:-${#trigger}}
|
||||||
[ -z "${tokens[-1]}" ] && lbuf=$LBUFFER || lbuf=${LBUFFER:0:-${#tokens[-1]}}
|
if [[ $prefix = *'$('* ]] || [[ $prefix = *'<('* ]] || [[ $prefix = *'>('* ]] || [[ $prefix = *':='* ]] || [[ $prefix = *'`'* ]]; then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
[ -n "${tokens[-1]}" ] && lbuf=${lbuf:0:-${#tokens[-1]}}
|
||||||
|
|
||||||
if eval "type _fzf_complete_${cmd} > /dev/null"; then
|
if eval "noglob type _fzf_complete_${cmd_word} >/dev/null"; then
|
||||||
eval "prefix=\"$prefix\" _fzf_complete_${cmd} \"$lbuf\""
|
prefix="$prefix" eval _fzf_complete_${cmd_word} ${(q)lbuf}
|
||||||
elif [ ${d_cmds[(i)$cmd]} -le ${#d_cmds} ]; then
|
zle reset-prompt
|
||||||
|
elif [ ${d_cmds[(i)$cmd_word]} -le ${#d_cmds} ]; then
|
||||||
_fzf_dir_completion "$prefix" "$lbuf"
|
_fzf_dir_completion "$prefix" "$lbuf"
|
||||||
else
|
else
|
||||||
_fzf_path_completion "$prefix" "$lbuf"
|
_fzf_path_completion "$prefix" "$lbuf"
|
||||||
@@ -195,5 +460,13 @@ fzf-completion() {
|
|||||||
unset binding
|
unset binding
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Normal widget
|
||||||
zle -N fzf-completion
|
zle -N fzf-completion
|
||||||
bindkey '^I' fzf-completion
|
bindkey '^I' fzf-completion
|
||||||
|
fi
|
||||||
|
|
||||||
|
} always {
|
||||||
|
# Restore the original options.
|
||||||
|
eval $__fzf_completion_options
|
||||||
|
'unset' '__fzf_completion_options'
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,121 +1,171 @@
|
|||||||
# Key bindings
|
# ____ ____
|
||||||
# ------------
|
# / __/___ / __/
|
||||||
__fzf_select__() {
|
# / /_/_ / / /_
|
||||||
local cmd="${FZF_CTRL_T_COMMAND:-"command find -L . -mindepth 1 \\( -path '*/\\.*' -o -fstype 'devfs' -o -fstype 'devtmpfs' -o -fstype 'proc' \\) -prune \
|
# / __/ / /_/ __/
|
||||||
-o -type f -print \
|
# /_/ /___/_/ key-bindings.bash
|
||||||
-o -type d -print \
|
#
|
||||||
-o -type l -print 2> /dev/null | cut -b3-"}"
|
# - $FZF_TMUX_OPTS
|
||||||
eval "$cmd" | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_CTRL_T_OPTS" fzf -m "$@" | while read -r item; do
|
# - $FZF_CTRL_T_COMMAND
|
||||||
printf '%q ' "$item"
|
# - $FZF_CTRL_T_OPTS
|
||||||
done
|
# - $FZF_CTRL_R_COMMAND
|
||||||
echo
|
# - $FZF_CTRL_R_OPTS
|
||||||
}
|
# - $FZF_ALT_C_COMMAND
|
||||||
|
# - $FZF_ALT_C_OPTS
|
||||||
|
|
||||||
if [[ $- =~ i ]]; then
|
if [[ $- =~ i ]]; then
|
||||||
|
|
||||||
__fzf_use_tmux__() {
|
|
||||||
[ -n "$TMUX_PANE" ] && [ "${FZF_TMUX:-0}" != 0 ] && [ ${LINES:-40} -gt 15 ]
|
# Key bindings
|
||||||
|
# ------------
|
||||||
|
|
||||||
|
#----BEGIN shfmt
|
||||||
|
#----BEGIN INCLUDE common.sh
|
||||||
|
# NOTE: Do not directly edit this section, which is copied from "common.sh".
|
||||||
|
# To modify it, one can edit "common.sh" and run "./update.sh" to apply
|
||||||
|
# the changes. See code comments in "common.sh" for the implementation details.
|
||||||
|
|
||||||
|
__fzf_defaults() {
|
||||||
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
|
}
|
||||||
|
|
||||||
|
__fzf_exec_awk() {
|
||||||
|
if [[ -z ${__fzf_awk-} ]]; then
|
||||||
|
__fzf_awk=awk
|
||||||
|
if [[ $OSTYPE == solaris* && -x /usr/xpg4/bin/awk ]]; then
|
||||||
|
__fzf_awk=/usr/xpg4/bin/awk
|
||||||
|
elif command -v mawk > /dev/null 2>&1; then
|
||||||
|
local n x y z d
|
||||||
|
IFS=' .' read -r n x y z d <<< $(command mawk -W version 2> /dev/null)
|
||||||
|
[[ $n == mawk ]] && ((d >= 20230302 && (x * 1000 + y) * 1000 + z >= 1003004)) && __fzf_awk=mawk
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
LC_ALL=C exec "$__fzf_awk" "$@"
|
||||||
|
}
|
||||||
|
#----END INCLUDE
|
||||||
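The mawk guard in `__fzf_exec_awk` encodes the version triple as `(x * 1000 + y) * 1000 + z`, so `1003004` corresponds to 1.3.4, and it additionally requires a snapshot date of 20230302 or later. The same test in isolation, fed a made-up `mawk -W version` string:

IFS=' .' read -r name major minor patch date <<< "mawk 1.3.4 20240905"
if [[ $name == mawk ]] && ((date >= 20230302 && (major * 1000 + minor) * 1000 + patch >= 1003004)); then
  echo "this mawk is new enough to be preferred over the default awk"
fi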
|
|
||||||
|
__fzf_select__() {
|
||||||
|
FZF_DEFAULT_COMMAND=${FZF_CTRL_T_COMMAND:-} \
|
||||||
|
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --walker=file,dir,follow,hidden --scheme=path" "${FZF_CTRL_T_OPTS-} -m") \
|
||||||
|
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) "$@" |
|
||||||
|
while read -r item; do
|
||||||
|
printf '%q ' "$item" # escape special chars
|
||||||
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzfcmd() {
|
__fzfcmd() {
|
||||||
__fzf_use_tmux__ &&
|
[[ -n ${TMUX_PANE-} ]] && { [[ ${FZF_TMUX:-0} != 0 ]] || [[ -n ${FZF_TMUX_OPTS-} ]]; } &&
|
||||||
echo "fzf-tmux -d${FZF_TMUX_HEIGHT:-40%}" || echo "fzf"
|
echo "fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- " || echo "fzf"
|
||||||
}
|
|
||||||
|
|
||||||
__fzf_select_tmux__() {
|
|
||||||
local height
|
|
||||||
height=${FZF_TMUX_HEIGHT:-40%}
|
|
||||||
if [[ $height =~ %$ ]]; then
|
|
||||||
height="-p ${height%\%}"
|
|
||||||
else
|
|
||||||
height="-l $height"
|
|
||||||
fi
|
|
||||||
|
|
||||||
tmux split-window $height "cd $(printf %q "$PWD"); FZF_DEFAULT_OPTS=$(printf %q "$FZF_DEFAULT_OPTS") PATH=$(printf %q "$PATH") FZF_CTRL_T_COMMAND=$(printf %q "$FZF_CTRL_T_COMMAND") FZF_CTRL_T_OPTS=$(printf %q "$FZF_CTRL_T_OPTS") bash -c 'source \"${BASH_SOURCE[0]}\"; RESULT=\"\$(__fzf_select__ --no-height)\"; tmux setb -b fzf \"\$RESULT\" \\; pasteb -b fzf -t $TMUX_PANE \\; deleteb -b fzf || tmux send-keys -t $TMUX_PANE \"\$RESULT\"'"
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fzf-file-widget() {
|
fzf-file-widget() {
|
||||||
if __fzf_use_tmux__; then
|
local selected="$(__fzf_select__ "$@")"
|
||||||
__fzf_select_tmux__
|
READLINE_LINE="${READLINE_LINE:0:READLINE_POINT}$selected${READLINE_LINE:READLINE_POINT}"
|
||||||
else
|
READLINE_POINT=$((READLINE_POINT + ${#selected}))
|
||||||
local selected="$(__fzf_select__)"
|
|
||||||
READLINE_LINE="${READLINE_LINE:0:$READLINE_POINT}$selected${READLINE_LINE:$READLINE_POINT}"
|
|
||||||
READLINE_POINT=$(( READLINE_POINT + ${#selected} ))
|
|
||||||
fi
|
|
||||||
}
|
}
|
||||||
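`fzf-file-widget` is designed to be attached with `bind -x`, and it talks to readline purely through `READLINE_LINE` and `READLINE_POINT`: the selection is spliced in at the cursor and the cursor is advanced past it. The same pattern in a tiny standalone widget (the widget name and key binding are hypothetical):

__insert_timestamp() {
  local text="$(date +%F) "
  READLINE_LINE="${READLINE_LINE:0:READLINE_POINT}$text${READLINE_LINE:READLINE_POINT}"
  READLINE_POINT=$((READLINE_POINT + ${#text}))
}
bind -x '"\C-x\C-d": __insert_timestamp'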
|
|
||||||
__fzf_cd__() {
|
__fzf_cd__() {
|
||||||
local cmd dir
|
local dir
|
||||||
cmd="${FZF_ALT_C_COMMAND:-"command find -L . \\( -path '*/\\.*' -o -fstype 'devfs' -o -fstype 'devtmpfs' -o -fstype 'proc' \\) -prune \
|
dir=$(
|
||||||
-o -type d -print 2> /dev/null | sed 1d | cut -b3-"}"
|
FZF_DEFAULT_COMMAND=${FZF_ALT_C_COMMAND:-} \
|
||||||
dir=$(eval "$cmd" | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_ALT_C_OPTS" $(__fzfcmd) +m) && printf 'cd %q' "$dir"
|
FZF_DEFAULT_OPTS=$(__fzf_defaults "--reverse --walker=dir,follow,hidden --scheme=path" "${FZF_ALT_C_OPTS-} +m") \
|
||||||
|
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd)
|
||||||
|
) && printf 'builtin cd -- %q' "$(builtin unset CDPATH && builtin cd -- "$dir" && builtin pwd)"
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzf_history__() (
|
if command -v perl > /dev/null; then
|
||||||
local line
|
__fzf_history__() {
|
||||||
shopt -u nocaseglob nocasematch
|
local output script
|
||||||
line=$(
|
script='BEGIN { getc; $/ = "\n\t"; $HISTCOUNT = $ENV{last_hist} + 1 } s/^[ *]//; s/\n/\n\t/gm; print $HISTCOUNT - $. . "\t$_" if !$seen{$_}++'
|
||||||
HISTTIMEFORMAT= history |
|
output=$(
|
||||||
FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} $FZF_DEFAULT_OPTS +s --tac -n2..,.. --tiebreak=index --bind=ctrl-r:toggle-sort $FZF_CTRL_R_OPTS +m" $(__fzfcmd) |
|
set +o pipefail
|
||||||
command grep '^ *[0-9]') &&
|
builtin fc -lnr -2147483648 |
|
||||||
if [[ $- =~ H ]]; then
|
last_hist=$(HISTTIMEFORMAT='' builtin history 1) command perl -n -l0 -e "$script" |
|
||||||
sed 's/^ *\([0-9]*\)\** .*/!\1/' <<< "$line"
|
FZF_DEFAULT_OPTS=$(__fzf_defaults "" "-n2..,.. --scheme=history --bind=ctrl-r:toggle-sort,alt-r:toggle-raw --wrap-sign '"$'\t'"↳ ' --highlight-line ${FZF_CTRL_R_OPTS-} +m --read0") \
|
||||||
|
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) --query "$READLINE_LINE"
|
||||||
|
) || return
|
||||||
|
READLINE_LINE=$(command perl -pe 's/^\d*\t//' <<< "$output")
|
||||||
|
if [[ -z $READLINE_POINT ]]; then
|
||||||
|
echo "$READLINE_LINE"
|
||||||
else
|
else
|
||||||
sed 's/^ *\([0-9]*\)\** *//' <<< "$line"
|
READLINE_POINT=0x7fffffff
|
||||||
fi
|
fi
|
||||||
)
|
}
|
||||||
|
else # awk - fallback for POSIX systems
|
||||||
|
__fzf_history__() {
|
||||||
|
local output script
|
||||||
|
[[ $(HISTTIMEFORMAT='' builtin history 1) =~ [[:digit:]]+ ]] # how many history entries
|
||||||
|
script='function P(b) { ++n; sub(/^[ *]/, "", b); if (!seen[b]++) { printf "%d\t%s%c", '$((BASH_REMATCH + 1))' - n, b, 0 } }
|
||||||
|
NR==1 { b = substr($0, 2); next }
|
||||||
|
/^\t/ { P(b); b = substr($0, 2); next }
|
||||||
|
{ b = b RS $0 }
|
||||||
|
END { if (NR) P(b) }'
|
||||||
|
output=$(
|
||||||
|
set +o pipefail
|
||||||
|
builtin fc -lnr -2147483648 2> /dev/null | # ( $'\t '<lines>$'\n' )* ; <lines> ::= [^\n]* ( $'\n'<lines> )*
|
||||||
|
__fzf_exec_awk "$script" | # ( <counter>$'\t'<lines>$'\000' )*
|
||||||
|
FZF_DEFAULT_OPTS=$(__fzf_defaults "" "-n2..,.. --scheme=history --bind=ctrl-r:toggle-sort,alt-r:toggle-raw --wrap-sign '"$'\t'"↳ ' --highlight-line ${FZF_CTRL_R_OPTS-} +m --read0") \
|
||||||
|
FZF_DEFAULT_OPTS_FILE='' $(__fzfcmd) --query "$READLINE_LINE"
|
||||||
|
) || return
|
||||||
|
READLINE_LINE=${output#*$'\t'}
|
||||||
|
if [[ -z $READLINE_POINT ]]; then
|
||||||
|
echo "$READLINE_LINE"
|
||||||
|
else
|
||||||
|
READLINE_POINT=0x7fffffff
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
fi
|
||||||
|
|
  # Required to refresh the prompt after fzf
  bind -m emacs-standard '"\er": redraw-current-line'

  bind -m vi-command '"\C-z": emacs-editing-mode'
  bind -m vi-insert '"\C-z": emacs-editing-mode'
  bind -m emacs-standard '"\C-z": vi-editing-mode'

  if ((BASH_VERSINFO[0] < 4)); then
    # CTRL-T - Paste the selected file path into the command line
    if [[ ${FZF_CTRL_T_COMMAND-x} != "" ]]; then
      bind -m emacs-standard '"\C-t": " \C-b\C-k \C-u`__fzf_select__`\e\C-e\er\C-a\C-y\C-h\C-e\e \C-y\ey\C-x\C-x\C-f\C-y\ey\C-_"'
      bind -m vi-command '"\C-t": "\C-z\C-t\C-z"'
      bind -m vi-insert '"\C-t": "\C-z\C-t\C-z"'
    fi

    # CTRL-R - Paste the selected command from history into the command line
    if [[ ${FZF_CTRL_R_COMMAND-x} != "" ]]; then
      if [[ -n ${FZF_CTRL_R_COMMAND-} ]]; then
        echo "warning: FZF_CTRL_R_COMMAND is set to a custom command, but custom commands are not yet supported for CTRL-R" >&2
      fi
      bind -m emacs-standard '"\C-r": "\C-e \C-u\C-y\ey\C-u`__fzf_history__`\e\C-e\er"'
      bind -m vi-command '"\C-r": "\C-z\C-r\C-z"'
      bind -m vi-insert '"\C-r": "\C-z\C-r\C-z"'
    fi
  else
    # CTRL-T - Paste the selected file path into the command line
    if [[ ${FZF_CTRL_T_COMMAND-x} != "" ]]; then
      bind -m emacs-standard -x '"\C-t": fzf-file-widget'
      bind -m vi-command -x '"\C-t": fzf-file-widget'
      bind -m vi-insert -x '"\C-t": fzf-file-widget'
    fi

    # CTRL-R - Paste the selected command from history into the command line
    if [[ ${FZF_CTRL_R_COMMAND-x} != "" ]]; then
      if [[ -n ${FZF_CTRL_R_COMMAND-} ]]; then
        echo "warning: FZF_CTRL_R_COMMAND is set to a custom command, but custom commands are not yet supported for CTRL-R" >&2
      fi
      bind -m emacs-standard -x '"\C-r": __fzf_history__'
      bind -m vi-command -x '"\C-r": __fzf_history__'
      bind -m vi-insert -x '"\C-r": __fzf_history__'
    fi
  fi

  # ALT-C - cd into the selected directory
  if [[ ${FZF_ALT_C_COMMAND-x} != "" ]]; then
    bind -m emacs-standard '"\ec": " \C-b\C-k \C-u`__fzf_cd__`\e\C-e\er\C-m\C-y\C-h\e \C-y\ey\C-x\C-x\C-d\C-y\ey\C-_"'
    bind -m vi-command '"\ec": "\C-z\ec\C-z"'
    bind -m vi-insert '"\ec": "\C-z\ec\C-z"'
  fi
#----END shfmt

fi
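Each binding block above is wrapped in a [[ ${FZF_*_COMMAND-x} != "" ]] guard, so a key can be opted out of by exporting the corresponding variable as an empty string before the script is sourced; leaving the variable unset keeps the default binding. An illustrative snippet, assuming the usual ~/.fzf.bash loader path:

  export FZF_CTRL_T_COMMAND=   # set but empty: skip the CTRL-T binding
  source ~/.fzf.bash           # CTRL-R and ALT-C are still bound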
shell/key-bindings.fish (@@ -1,87 +1,236 @@) - rewritten. The new fzf_key_bindings refuses to run on fish older than 3.1b1 or when fzf is not in $PATH, and deliberately avoids the $(cmd) substitution syntax so the file still sources on fish versions before 3.4.0. It adds a __fzf_defaults helper (baseline --height/--min-height/--bind=ctrl-z:ignore options, then the contents of $FZF_DEFAULT_OPTS_FILE, then $FZF_DEFAULT_OPTS and the widget-specific options), a __fzfcmd that prefers fzf-tmux when $FZF_TMUX_OPTS is set, and a version-aware __fzf_parse_commandline that splits the current token into the longest existing directory, the fzf query, and an optional -option= prefix. fzf-file-widget and fzf-cd-widget now rely on fzf's --walker and --walker-root instead of find pipelines, and fzf-history-widget gains multi-line history support (builtin history -z reformatted by perl or a pure-fish fallback, a shift-delete binding that deletes the highlighted entries via history delete, and a history merge before searching unless $fish_private_mode is set). Finally, CTRL-R, CTRL-T, and ALT-C are bound in both the default and insert modes only when $FZF_CTRL_R_COMMAND, $FZF_CTRL_T_COMMAND, and $FZF_ALT_C_COMMAND are unset or non-empty, with a warning that a custom FZF_CTRL_R_COMMAND is not yet supported.
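Both the fish and zsh versions of __fzfcmd now check $FZF_TMUX_OPTS before falling back to $FZF_TMUX and $FZF_TMUX_HEIGHT, so routing every widget through fzf-tmux only takes one variable. An illustrative export (bash/zsh syntax; fish users would use set -gx, and the popup flag assumes a tmux build with popup support):

  export FZF_TMUX_OPTS='-p 80%,60%'   # run the widgets in a tmux popup via fzf-tmux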
shell/key-bindings.zsh (@@ -1,74 +1,168 @@) - updated along the same lines. The script now snapshots the user's setopt state into __fzf_key_bindings_options and restores it in an always block, runs under 'builtin' 'emulate' 'zsh' with no_aliases, and only installs bindings in interactive shells. __fsel is renamed to __fzf_select and, like fzf-cd-widget, drops the find pipelines in favor of fzf's --walker options composed through __fzf_defaults; __fzf_use_tmux__ is removed and __fzfcmd honors $FZF_TMUX_OPTS; fzf-cd-widget pushes a "builtin cd -- ..." line with zle push-line/accept-line instead of changing directory in place; and fzf-history-widget deduplicates multi-line history through the zsh/parameter history array and perl, falling back to fc -rl piped through __fzf_exec_awk, then restores the chosen entry with vi-fetch-history (or puts a custom query back into LBUFFER). CTRL-T, ALT-C, and CTRL-R are bound in the emacs, vicmd, and viins keymaps only when the corresponding FZF_*_COMMAND variable is unset or non-empty, with the same warning about a custom FZF_CTRL_R_COMMAND. The shared helpers are embedded verbatim from common.sh:

#----BEGIN INCLUDE common.sh
# NOTE: Do not directly edit this section, which is copied from "common.sh".
# To modify it, one can edit "common.sh" and run "./update.sh" to apply
# the changes. See code comments in "common.sh" for the implementation details.

__fzf_defaults() {
  printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
  command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
  printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
}

__fzf_exec_awk() {
  if [[ -z ${__fzf_awk-} ]]; then
    __fzf_awk=awk
    if [[ $OSTYPE == solaris* && -x /usr/xpg4/bin/awk ]]; then
      __fzf_awk=/usr/xpg4/bin/awk
    elif command -v mawk > /dev/null 2>&1; then
      local n x y z d
      IFS=' .' read -r n x y z d <<< $(command mawk -W version 2> /dev/null)
      [[ $n == mawk ]] && ((d >= 20230302 && (x * 1000 + y) * 1000 + z >= 1003004)) && __fzf_awk=mawk
    fi
  fi
  LC_ALL=C exec "$__fzf_awk" "$@"
}
#----END INCLUDE
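__fzf_defaults simply prints three groups of options in order - the hard-coded baseline plus the widget's first argument, the contents of $FZF_DEFAULT_OPTS_FILE, and $FZF_DEFAULT_OPTS followed by the widget's second argument - relying on fzf parsing later flags over earlier ones so that the more specific settings win. A quick way to inspect the composed string, assuming the functions above have been sourced into an interactive shell:

  __fzf_defaults "--reverse --walker=file,dir,follow,hidden --scheme=path" "${FZF_CTRL_T_OPTS-} -m"
  # prints the three option groups, one per line, as the CTRL-T widget would pass them to fzf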
shell/update.sh (new executable file, 68 lines)
@@ -0,0 +1,68 @@

#!/usr/bin/env bash

# This script applies the contents of "common.sh" to the other files.

set -e

dir=${0%"${0##*/}"}

update() {
  {
    sed -n '1,/^#----BEGIN INCLUDE common\.sh/p' "$1"
    cat << EOF
# NOTE: Do not directly edit this section, which is copied from "common.sh".
# To modify it, one can edit "common.sh" and run "./update.sh" to apply
# the changes. See code comments in "common.sh" for the implementation details.
EOF
    echo
    grep -v '^[[:blank:]]*#' "$dir/common.sh" # remove code comments in common.sh
    sed -n '/^#----END INCLUDE/,$p' "$1"
  } > "$1.part"

  mv -f "$1.part" "$1"
}

update "$dir/completion.bash"
update "$dir/completion.zsh"
update "$dir/key-bindings.bash"
update "$dir/key-bindings.zsh"

# Check if --check is in ARGV
check=0
rest=()
for arg in "$@"; do
  case $arg in
    --check) check=1 ;;
    *) rest+=("$arg") ;;
  esac
done

fmt() {
  if ! grep -q "^#----BEGIN shfmt" "$1"; then
    if [[ $check == 1 ]]; then
      shfmt -d "$1"
      return $?
    else
      shfmt -w "$1"
    fi
  else
    {
      sed -n '1,/^#----BEGIN shfmt/p' "$1" | sed '$d'
      sed -n '/^#----BEGIN shfmt/,/^#----END shfmt/p' "$1" | shfmt --filename "$1"
      sed -n '/^#----END shfmt/,$p' "$1" | sed '1d'
    } > "$1.part"

    if [[ $check == 1 ]]; then
      diff -q "$1" "$1.part"
      ret=$?
      rm -f "$1.part"
      return $ret
    fi

    mv -f "$1.part" "$1"
  fi
}

for file in "${rest[@]}"; do
  fmt "$file" || exit $?
done
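The script always refreshes the common.sh include in the four completion and key-binding files first; any extra arguments are then passed to the shfmt step, and --check turns that step into a dry-run comparison instead of an in-place rewrite. A sketch of typical invocations from the shell/ directory (assuming shfmt is installed):

  ./update.sh                             # re-embed common.sh into completion.{bash,zsh} and key-bindings.{bash,zsh}
  ./update.sh --check key-bindings.bash   # additionally verify shfmt formatting of the delimited region without rewriting it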
Four legacy Docker build environments (the ones driven by the old src/Makefile's docker-android/arch/centos/ubuntu sandbox targets) are removed:

- @@ -1,40 +0,0 @@ (android): an Ubuntu 14.04 image that built Go 1.4.2 against the Android NDK r10e and a static wide-character ncurses 5.9 for ARMv7 builds.
- @@ -1,24 +0,0 @@ (arch): a base/archlinux:2014.07.03 image with Go 1.4.2 plus gcc-multilib and lib32-ncurses for the i386 build.
- @@ -1,32 +0,0 @@ (centos): a CentOS 6 image with Go 1.4/1.7 and static ncurses/gpm RPMs for the static 32-bit build.
- @@ -1,22 +0,0 @@ (ubuntu): an Ubuntu 14.04 image with Go 1.4.2, libncurses-dev, libgpm-dev, and lib32ncurses5-dev for the i386 build.
LICENSE
@@ -1,6 +1,6 @@
 The MIT License (MIT)

-Copyright (c) 2017 Junegunn Choi
+Copyright (c) 2013-2025 Junegunn Choi

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
src/Makefile (170 lines, removed): the legacy GOPATH-era Makefile that derived GOOS/GOARCH from uname, built fzf-<os>_<arch> binaries (386, amd64, arm5-arm8), packaged the versioned release tarballs and zip files, and drove the Docker sandbox targets (arch, ubuntu, centos, android) plus the android-build cross-compilation recipe.
src/README.md (106 lines, removed): the old "fzf in Go" write-up covering the upgrade path from the Ruby version, the motivations for the rewrite (no Ruby/curses-gem dependency, better performance without the GIL), build and test instructions, the list of third-party libraries (go-runewidth, go-shellwords, go-isatty, tcell), and the MIT license pointer.
src/actiontype_string.go (new file, 192 lines): output of `stringer -type=actionType` - a generated String() method for the actionType enum, backed by the _actionType_name and _actionType_index tables and guarded by compile-time index checks covering actIgnore (0) through actAsync (169). The header marks the file as generated code that must not be edited by hand.
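Being stringer output, the file is regenerated rather than edited when new actions are added. A sketch of the regeneration commands (the install path is the conventional one for the stringer tool; the exact invocation used in this repository, e.g. via go generate, may differ):

  go install golang.org/x/tools/cmd/stringer@latest
  cd src && stringer -type=actionType   # matches the command recorded in the generated header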
611
src/algo/algo.go
611
src/algo/algo.go
@@ -78,15 +78,22 @@ Scoring criteria
 */

 import (
+    "bytes"
     "fmt"
+    "os"
     "strings"
     "unicode"
+    "unicode/utf8"

     "github.com/junegunn/fzf/src/util"
 )

 var DEBUG bool

+var delimiterChars = "/,:;|"
+
+const whiteChars = " \t\n\v\f\r\x85\xA0"
+
 func indexAt(index int, max int, forward bool) int {
     if forward {
         return index
@@ -105,7 +112,7 @@ type Result struct {
 const (
     scoreMatch        = 16
     scoreGapStart     = -3
-    scoreGapExtention = -1
+    scoreGapExtension = -1

     // We prefer matches at the beginning of a word, but the bonus should not be
     // too great to prevent the longer acronym matches from always winning over
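The constants above drive the score arithmetic: each matched character earns scoreMatch, and a gap costs scoreGapStart once plus scoreGapExtension for every additional skipped character. A small sketch of that penalty formula, with the values copied from the diff and an illustrative helper name:

package main

import "fmt"

// Score constants copied from the diff above.
const (
    scoreMatch        = 16
    scoreGapStart     = -3
    scoreGapExtension = -1
)

// gapPenalty is an illustrative helper: a gap of n skipped characters costs
// scoreGapStart once plus scoreGapExtension for each character after the first.
func gapPenalty(n int) int {
    if n <= 0 {
        return 0
    }
    return scoreGapStart + (n-1)*scoreGapExtension
}

func main() {
    // Two matched pattern characters separated by a three-character gap,
    // before any boundary or camelCase bonus is applied:
    fmt.Println(2*scoreMatch + gapPenalty(3)) // 27
}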
@@ -123,31 +130,93 @@ const (
     // Edge-triggered bonus for matches in camelCase words.
     // Compared to word-boundary case, they don't accompany single-character gaps
     // (e.g. FooBar vs. foo-bar), so we deduct bonus point accordingly.
-    bonusCamel123 = bonusBoundary + scoreGapExtention
+    bonusCamel123 = bonusBoundary + scoreGapExtension

     // Minimum bonus point given to characters in consecutive chunks.
     // Note that bonus points for consecutive matches shouldn't have needed if we
     // used fixed match score as in the original algorithm.
-    bonusConsecutive = -(scoreGapStart + scoreGapExtention)
+    bonusConsecutive = -(scoreGapStart + scoreGapExtension)

     // The first character in the typed pattern usually has more significance
     // than the rest so it's important that it appears at special positions where
-    // bonus points are given. e.g. "to-go" vs. "ongoing" on "og" or on "ogo".
+    // bonus points are given, e.g. "to-go" vs. "ongoing" on "og" or on "ogo".
     // The amount of the extra bonus should be limited so that the gap penalty is
     // still respected.
     bonusFirstCharMultiplier = 2
 )

+var (
+    // Extra bonus for word boundary after whitespace character or beginning of the string
+    bonusBoundaryWhite int16 = bonusBoundary + 2
+
+    // Extra bonus for word boundary after slash, colon, semi-colon, and comma
+    bonusBoundaryDelimiter int16 = bonusBoundary + 1
+
+    initialCharClass = charWhite
+
+    // A minor optimization that can give 15%+ performance boost
+    asciiCharClasses [unicode.MaxASCII + 1]charClass
+
+    // A minor optimization that can give yet another 5% performance boost
+    bonusMatrix [charNumber + 1][charNumber + 1]int16
+)
+
 type charClass int

 const (
-    charNonWord charClass = iota
+    charWhite charClass = iota
+    charNonWord
+    charDelimiter
     charLower
     charUpper
     charLetter
     charNumber
 )

+func Init(scheme string) bool {
+    switch scheme {
+    case "default":
+        bonusBoundaryWhite = bonusBoundary + 2
+        bonusBoundaryDelimiter = bonusBoundary + 1
+    case "path":
+        bonusBoundaryWhite = bonusBoundary
+        bonusBoundaryDelimiter = bonusBoundary + 1
+        if os.PathSeparator == '/' {
+            delimiterChars = "/"
+        } else {
+            delimiterChars = string([]rune{os.PathSeparator, '/'})
+        }
+        initialCharClass = charDelimiter
+    case "history":
+        bonusBoundaryWhite = bonusBoundary
+        bonusBoundaryDelimiter = bonusBoundary
+    default:
+        return false
+    }
+    for i := 0; i <= unicode.MaxASCII; i++ {
+        char := rune(i)
+        c := charNonWord
+        if char >= 'a' && char <= 'z' {
+            c = charLower
+        } else if char >= 'A' && char <= 'Z' {
+            c = charUpper
+        } else if char >= '0' && char <= '9' {
+            c = charNumber
+        } else if strings.ContainsRune(whiteChars, char) {
+            c = charWhite
+        } else if strings.ContainsRune(delimiterChars, char) {
+            c = charDelimiter
+        }
+        asciiCharClasses[i] = c
+    }
+    for i := 0; i <= int(charNumber); i++ {
+        for j := 0; j <= int(charNumber); j++ {
+            bonusMatrix[i][j] = bonusFor(charClass(i), charClass(j))
+        }
+    }
+    return true
+}
+
 func posArray(withPos bool, len int) *[]int {
     if withPos {
         pos := make([]int, 0, len)
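Init precomputes an ASCII classification table and a bonus matrix once per scheme so the hot path can use plain array lookups. A condensed, standalone sketch of the classification step, mirroring the delimiterChars/whiteChars sets from the diff; the class names here are simplified strings rather than charClass values:

package main

import (
    "fmt"
    "strings"
    "unicode"
)

// Character sets copied from the diff above.
var delimiters = "/,:;|"
var whites = " \t\n\v\f\r\x85\xA0"

// classify mirrors the rules Init applies to every ASCII code point.
func classify(r rune) string {
    switch {
    case r >= 'a' && r <= 'z':
        return "lower"
    case r >= 'A' && r <= 'Z':
        return "upper"
    case r >= '0' && r <= '9':
        return "number"
    case strings.ContainsRune(whites, r):
        return "white"
    case strings.ContainsRune(delimiters, r):
        return "delimiter"
    default:
        return "non-word"
    }
}

func main() {
    // Build the lookup table once, then classification is a single index.
    var table [unicode.MaxASCII + 1]string
    for i := range table {
        table[i] = classify(rune(i))
    }
    fmt.Println(table['/'], table['a'], table[' ']) // delimiter lower white
}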
@@ -156,43 +225,22 @@ func posArray(withPos bool, len int) *[]int {
     return nil
 }

-func alloc16(offset int, slab *util.Slab, size int, clear bool) (int, []int16) {
+func alloc16(offset int, slab *util.Slab, size int) (int, []int16) {
     if slab != nil && cap(slab.I16) > offset+size {
         slice := slab.I16[offset : offset+size]
-        if clear {
-            for idx := range slice {
-                slice[idx] = 0
-            }
-        }
         return offset + size, slice
     }
     return offset, make([]int16, size)
 }

-func alloc32(offset int, slab *util.Slab, size int, clear bool) (int, []int32) {
+func alloc32(offset int, slab *util.Slab, size int) (int, []int32) {
     if slab != nil && cap(slab.I32) > offset+size {
         slice := slab.I32[offset : offset+size]
-        if clear {
-            for idx := range slice {
-                slice[idx] = 0
-            }
-        }
         return offset + size, slice
     }
     return offset, make([]int32, size)
 }

-func charClassOfAscii(char rune) charClass {
-    if char >= 'a' && char <= 'z' {
-        return charLower
-    } else if char >= 'A' && char <= 'Z' {
-        return charUpper
-    } else if char >= '0' && char <= '9' {
-        return charNumber
-    }
-    return charNonWord
-}
-
 func charClassOfNonAscii(char rune) charClass {
     if unicode.IsLower(char) {
         return charLower
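alloc16 and alloc32 slice a pre-allocated slab at increasing offsets and only fall back to make() when the slab is too small, which avoids per-match allocations. A standalone sketch of that reuse pattern; the Slab struct here is a stand-in for util.Slab:

package main

import "fmt"

// Slab is a simplified stand-in for util.Slab: one backing array shared by
// all the temporary int16 buffers a single match needs.
type Slab struct {
    I16 []int16
}

// alloc16 hands out the next size slots of the slab, or a fresh slice if the
// slab cannot hold them.
func alloc16(offset int, slab *Slab, size int) (int, []int16) {
    if slab != nil && cap(slab.I16) > offset+size {
        return offset + size, slab.I16[offset : offset+size]
    }
    return offset, make([]int16, size)
}

func main() {
    slab := &Slab{I16: make([]int16, 100)}
    offset := 0
    offset, h0 := alloc16(offset, slab, 10) // first 10 slots of the slab
    offset, b := alloc16(offset, slab, 10)  // next 10 slots, no new allocation
    fmt.Println(offset, len(h0), len(b))    // 20 10 10
}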
@@ -202,40 +250,60 @@ func charClassOfNonAscii(char rune) charClass {
         return charNumber
     } else if unicode.IsLetter(char) {
         return charLetter
+    } else if unicode.IsSpace(char) {
+        return charWhite
+    } else if strings.ContainsRune(delimiterChars, char) {
+        return charDelimiter
     }
     return charNonWord
 }

 func charClassOf(char rune) charClass {
     if char <= unicode.MaxASCII {
-        return charClassOfAscii(char)
+        return asciiCharClasses[char]
     }
     return charClassOfNonAscii(char)
 }

 func bonusFor(prevClass charClass, class charClass) int16 {
-    if prevClass == charNonWord && class != charNonWord {
-        // Word boundary
-        return bonusBoundary
-    } else if prevClass == charLower && class == charUpper ||
+    if class > charNonWord {
+        switch prevClass {
+        case charWhite:
+            // Word boundary after whitespace
+            return bonusBoundaryWhite
+        case charDelimiter:
+            // Word boundary after a delimiter character
+            return bonusBoundaryDelimiter
+        case charNonWord:
+            // Word boundary
+            return bonusBoundary
+        }
+    }
+
+    if prevClass == charLower && class == charUpper ||
         prevClass != charNumber && class == charNumber {
         // camelCase letter123
         return bonusCamel123
-    } else if class == charNonWord {
+    }
+
+    switch class {
+    case charNonWord, charDelimiter:
         return bonusNonWord
+    case charWhite:
+        return bonusBoundaryWhite
     }
     return 0
 }

-func bonusAt(input util.Chars, idx int) int16 {
+func bonusAt(input *util.Chars, idx int) int16 {
     if idx == 0 {
-        return bonusBoundary
+        return bonusBoundaryWhite
     }
-    return bonusFor(charClassOf(input.Get(idx-1)), charClassOf(input.Get(idx)))
+    return bonusMatrix[charClassOf(input.Get(idx-1))][charClassOf(input.Get(idx))]
 }

 func normalizeRune(r rune) rune {
-    if r < 0x00C0 || r > 0x2184 {
+    if r < 0x00C0 || r > 0xFF61 {
         return r
     }

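bonusFor now distinguishes whitespace, delimiter, and other non-word boundaries, and Init bakes the results into bonusMatrix so bonusAt becomes a table lookup. A trimmed-down sketch of the boundary ordering; it assumes bonusBoundary is 8 (scoreMatch/2, as in upstream fzf), and the enum and helper here are illustrative, not the package's own types:

package main

import "fmt"

// class is a reduced stand-in for charClass; the ordering mirrors the diff,
// where anything greater than nonWord counts as a word-ish character.
type class int

const (
    white class = iota
    nonWord
    delimiter
    lower
    upper
)

const (
    bonusBoundary          = 8 // assumed: scoreMatch / 2 in fzf
    bonusBoundaryDelimiter = bonusBoundary + 1
    bonusBoundaryWhite     = bonusBoundary + 2
)

// boundaryBonus returns the extra points for a character that starts a word,
// graded by what precedes it: whitespace > delimiter > other non-word.
func boundaryBonus(prev, cur class) int {
    if cur > nonWord {
        switch prev {
        case white:
            return bonusBoundaryWhite
        case delimiter:
            return bonusBoundaryDelimiter
        case nonWord:
            return bonusBoundary
        }
    }
    return 0
}

func main() {
    fmt.Println(boundaryBonus(white, lower))     // 10: "f" after a space, as in "foo bar"
    fmt.Println(boundaryBonus(delimiter, lower)) // 9:  "b" after "/", as in "foo/bar"
    fmt.Println(boundaryBonus(nonWord, lower))   // 8:  "b" after "-", as in "foo-bar"
}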
@@ -249,20 +317,130 @@ func normalizeRune(r rune) rune {
 // Algo functions make two assumptions
 // 1. "pattern" is given in lowercase if "caseSensitive" is false
 // 2. "pattern" is already normalized if "normalize" is true
-type Algo func(caseSensitive bool, normalize bool, forward bool, input util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int)
+type Algo func(caseSensitive bool, normalize bool, forward bool, input *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int)

-func FuzzyMatchV2(caseSensitive bool, normalize bool, forward bool, input util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
+func trySkip(input *util.Chars, caseSensitive bool, b byte, from int) int {
+    byteArray := input.Bytes()[from:]
+    idx := bytes.IndexByte(byteArray, b)
+    if idx == 0 {
+        // Can't skip any further
+        return from
+    }
+    // We may need to search for the uppercase letter again. We don't have to
+    // consider normalization as we can be sure that this is an ASCII string.
+    if !caseSensitive && b >= 'a' && b <= 'z' {
+        if idx > 0 {
+            byteArray = byteArray[:idx]
+        }
+        uidx := bytes.IndexByte(byteArray, b-32)
+        if uidx >= 0 {
+            idx = uidx
+        }
+    }
+    if idx < 0 {
+        return -1
+    }
+    return from + idx
+}
+
+func isAscii(runes []rune) bool {
+    for _, r := range runes {
+        if r >= utf8.RuneSelf {
+            return false
+        }
+    }
+    return true
+}
+
+func asciiFuzzyIndex(input *util.Chars, pattern []rune, caseSensitive bool) (int, int) {
+    // Can't determine
+    if !input.IsBytes() {
+        return 0, input.Length()
+    }
+
+    // Not possible
+    if !isAscii(pattern) {
+        return -1, -1
+    }
+
+    firstIdx, idx, lastIdx := 0, 0, 0
+    var b byte
+    for pidx := 0; pidx < len(pattern); pidx++ {
+        b = byte(pattern[pidx])
+        idx = trySkip(input, caseSensitive, b, idx)
+        if idx < 0 {
+            return -1, -1
+        }
+        if pidx == 0 && idx > 0 {
+            // Step back to find the right bonus point
+            firstIdx = idx - 1
+        }
+        lastIdx = idx
+        idx++
+    }
+
+    // Find the last appearance of the last character of the pattern to limit the search scope
+    bu := b
+    if !caseSensitive && b >= 'a' && b <= 'z' {
+        bu = b - 32
+    }
+    scope := input.Bytes()[lastIdx:]
+    for offset := len(scope) - 1; offset > 0; offset-- {
+        if scope[offset] == b || scope[offset] == bu {
+            return firstIdx, lastIdx + offset + 1
+        }
+    }
+    return firstIdx, lastIdx + 1
+}
+
+func debugV2(T []rune, pattern []rune, F []int32, lastIdx int, H []int16, C []int16) {
+    width := lastIdx - int(F[0]) + 1
+
+    for i, f := range F {
+        I := i * width
+        if i == 0 {
+            fmt.Print(" ")
+            for j := int(f); j <= lastIdx; j++ {
+                fmt.Print(" " + string(T[j]) + " ")
+            }
+            fmt.Println()
+        }
+        fmt.Print(string(pattern[i]) + " ")
+        for idx := int(F[0]); idx < int(f); idx++ {
+            fmt.Print(" 0 ")
+        }
+        for idx := int(f); idx <= lastIdx; idx++ {
+            fmt.Printf("%2d ", H[i*width+idx-int(F[0])])
+        }
+        fmt.Println()
+
+        fmt.Print(" ")
+        for idx, p := range C[I : I+width] {
+            if idx+int(F[0]) < int(F[i]) {
+                p = 0
+            }
+            if p > 0 {
+                fmt.Printf("%2d ", p)
+            } else {
+                fmt.Print(" ")
+            }
+        }
+        fmt.Println()
+    }
+}
+
+func FuzzyMatchV2(caseSensitive bool, normalize bool, forward bool, input *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
     // Assume that pattern is given in lowercase if case-insensitive.
     // First check if there's a match and calculate bonus for each position.
     // If the input string is too long, consider finding the matching chars in
     // this phase as well (non-optimal alignment).
-    N := input.Length()
     M := len(pattern)
-    switch M {
-    case 0:
+    if M == 0 {
         return Result{0, 0, 0}, posArray(withPos, M)
-    case 1:
-        return ExactMatchNaive(caseSensitive, normalize, forward, input, pattern[0:1], withPos, slab)
+    }
+    N := input.Length()
+    if M > N {
+        return Result{-1, -1, 0}, nil
     }

     // Since O(nm) algorithm can be prohibitively expensive for large input,
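asciiFuzzyIndex uses trySkip (a bytes.IndexByte scan) to find the smallest window of the input that could possibly contain the pattern before the O(nm) scoring pass runs. A simplified, ASCII-only and case-sensitive sketch of that windowing idea; the backward scan for the last occurrence of the final pattern character is omitted, and the function name is illustrative:

package main

import (
    "bytes"
    "fmt"
)

// fuzzyWindow locates each pattern byte in order and returns the span of the
// text that the full scoring phase actually needs to look at, or (-1, -1) if
// the pattern cannot match at all.
func fuzzyWindow(text []byte, pattern []byte) (int, int) {
    first, idx := 0, 0
    for pi, b := range pattern {
        off := bytes.IndexByte(text[idx:], b)
        if off < 0 {
            return -1, -1 // some pattern byte never appears
        }
        idx += off
        if pi == 0 && idx > 0 {
            first = idx - 1 // step back one character to keep the bonus context
        }
        idx++
    }
    return first, idx
}

func main() {
    fmt.Println(fuzzyWindow([]byte("src/algo/algo.go"), []byte("ago")))
    // 3 8: only text[3:8] needs the expensive dynamic-programming pass
}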
@@ -271,158 +449,179 @@ func FuzzyMatchV2(caseSensitive bool, normalize bool, forward bool, input util.C
|
|||||||
return FuzzyMatchV1(caseSensitive, normalize, forward, input, pattern, withPos, slab)
|
return FuzzyMatchV1(caseSensitive, normalize, forward, input, pattern, withPos, slab)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Phase 1. Optimized search for ASCII string
|
||||||
|
minIdx, maxIdx := asciiFuzzyIndex(input, pattern, caseSensitive)
|
||||||
|
if minIdx < 0 {
|
||||||
|
return Result{-1, -1, 0}, nil
|
||||||
|
}
|
||||||
|
// fmt.Println(N, maxIdx, idx, maxIdx-idx, input.ToString())
|
||||||
|
N = maxIdx - minIdx
|
||||||
|
|
||||||
// Reuse pre-allocated integer slice to avoid unnecessary sweeping of garbages
|
// Reuse pre-allocated integer slice to avoid unnecessary sweeping of garbages
|
||||||
offset16 := 0
|
offset16 := 0
|
||||||
offset32 := 0
|
offset32 := 0
|
||||||
|
offset16, H0 := alloc16(offset16, slab, N)
|
||||||
|
offset16, C0 := alloc16(offset16, slab, N)
|
||||||
// Bonus point for each position
|
// Bonus point for each position
|
||||||
offset16, B := alloc16(offset16, slab, N, false)
|
offset16, B := alloc16(offset16, slab, N)
|
||||||
// The first occurrence of each character in the pattern
|
// The first occurrence of each character in the pattern
|
||||||
offset32, F := alloc32(offset32, slab, M, false)
|
offset32, F := alloc32(offset32, slab, M)
|
||||||
// Rune array
|
// Rune array
|
||||||
offset32, T := alloc32(offset32, slab, N, false)
|
_, T := alloc32(offset32, slab, N)
|
||||||
|
input.CopyRunes(T, minIdx)
|
||||||
|
|
||||||
// Phase 1. Check if there's a match and calculate bonus for each point
|
// Phase 2. Calculate bonus for each point
|
||||||
pidx, lastIdx, prevClass := 0, 0, charNonWord
|
maxScore, maxScorePos := int16(0), 0
|
||||||
for idx := 0; idx < N; idx++ {
|
pidx, lastIdx := 0, 0
|
||||||
char := input.Get(idx)
|
pchar0, pchar, prevH0, prevClass, inGap := pattern[0], pattern[0], int16(0), initialCharClass, false
|
||||||
|
for off, char := range T {
|
||||||
var class charClass
|
var class charClass
|
||||||
if char <= unicode.MaxASCII {
|
if char <= unicode.MaxASCII {
|
||||||
class = charClassOfAscii(char)
|
class = asciiCharClasses[char]
|
||||||
|
if !caseSensitive && class == charUpper {
|
||||||
|
char += 32
|
||||||
|
T[off] = char
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
class = charClassOfNonAscii(char)
|
class = charClassOfNonAscii(char)
|
||||||
}
|
if !caseSensitive && class == charUpper {
|
||||||
|
|
||||||
if !caseSensitive && class == charUpper {
|
|
||||||
if char <= unicode.MaxASCII {
|
|
||||||
char += 32
|
|
||||||
} else {
|
|
||||||
char = unicode.To(unicode.LowerCase, char)
|
char = unicode.To(unicode.LowerCase, char)
|
||||||
}
|
}
|
||||||
|
if normalize {
|
||||||
|
char = normalizeRune(char)
|
||||||
|
}
|
||||||
|
T[off] = char
|
||||||
}
|
}
|
||||||
|
|
||||||
if normalize {
|
bonus := bonusMatrix[prevClass][class]
|
||||||
char = normalizeRune(char)
|
B[off] = bonus
|
||||||
}
|
|
||||||
|
|
||||||
T[idx] = char
|
|
||||||
B[idx] = bonusFor(prevClass, class)
|
|
||||||
prevClass = class
|
prevClass = class
|
||||||
|
|
||||||
if pidx < M {
|
if char == pchar {
|
||||||
if char == pattern[pidx] {
|
if pidx < M {
|
||||||
lastIdx = idx
|
F[pidx] = int32(off)
|
||||||
F[pidx] = int32(idx)
|
|
||||||
pidx++
|
pidx++
|
||||||
|
pchar = pattern[util.Min(pidx, M-1)]
|
||||||
}
|
}
|
||||||
} else {
|
lastIdx = off
|
||||||
if char == pattern[M-1] {
|
|
||||||
lastIdx = idx
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if char == pchar0 {
|
||||||
|
score := scoreMatch + bonus*bonusFirstCharMultiplier
|
||||||
|
H0[off] = score
|
||||||
|
C0[off] = 1
|
||||||
|
if M == 1 && (forward && score > maxScore || !forward && score >= maxScore) {
|
||||||
|
maxScore, maxScorePos = score, off
|
||||||
|
if forward && bonus >= bonusBoundary {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
inGap = false
|
||||||
|
} else {
|
||||||
|
if inGap {
|
||||||
|
H0[off] = util.Max16(prevH0+scoreGapExtension, 0)
|
||||||
|
} else {
|
||||||
|
H0[off] = util.Max16(prevH0+scoreGapStart, 0)
|
||||||
|
}
|
||||||
|
C0[off] = 0
|
||||||
|
inGap = true
|
||||||
|
}
|
||||||
|
prevH0 = H0[off]
|
||||||
}
|
}
|
||||||
if pidx != M {
|
if pidx != M {
|
||||||
return Result{-1, -1, 0}, nil
|
return Result{-1, -1, 0}, nil
|
||||||
}
|
}
|
||||||
|
if M == 1 {
|
||||||
|
result := Result{minIdx + maxScorePos, minIdx + maxScorePos + 1, int(maxScore)}
|
||||||
|
if !withPos {
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
pos := []int{minIdx + maxScorePos}
|
||||||
|
return result, &pos
|
||||||
|
}
|
||||||
|
|
||||||
// Phase 2. Fill in score matrix (H)
|
// Phase 3. Fill in score matrix (H)
|
||||||
// Unlike the original algorithm, we do not allow omission.
|
// Unlike the original algorithm, we do not allow omission.
|
||||||
width := lastIdx - int(F[0]) + 1
|
f0 := int(F[0])
|
||||||
offset16, H := alloc16(offset16, slab, width*M, false)
|
width := lastIdx - f0 + 1
|
||||||
|
offset16, H := alloc16(offset16, slab, width*M)
|
||||||
|
copy(H, H0[f0:lastIdx+1])
|
||||||
|
|
||||||
// Possible length of consecutive chunk at each position.
|
// Possible length of consecutive chunk at each position.
|
||||||
offset16, C := alloc16(offset16, slab, width*M, false)
|
_, C := alloc16(offset16, slab, width*M)
|
||||||
|
copy(C, C0[f0:lastIdx+1])
|
||||||
|
|
||||||
maxScore, maxScorePos := int16(0), 0
|
Fsub := F[1:]
|
||||||
for i := 0; i < M; i++ {
|
Psub := pattern[1:][:len(Fsub)]
|
||||||
I := i * width
|
for off, f := range Fsub {
|
||||||
|
f := int(f)
|
||||||
|
pchar := Psub[off]
|
||||||
|
pidx := off + 1
|
||||||
|
row := pidx * width
|
||||||
inGap := false
|
inGap := false
|
||||||
for j := int(F[i]); j <= lastIdx; j++ {
|
Tsub := T[f : lastIdx+1]
|
||||||
j0 := j - int(F[0])
|
Bsub := B[f:][:len(Tsub)]
|
||||||
|
Csub := C[row+f-f0:][:len(Tsub)]
|
||||||
|
Cdiag := C[row+f-f0-1-width:][:len(Tsub)]
|
||||||
|
Hsub := H[row+f-f0:][:len(Tsub)]
|
||||||
|
Hdiag := H[row+f-f0-1-width:][:len(Tsub)]
|
||||||
|
Hleft := H[row+f-f0-1:][:len(Tsub)]
|
||||||
|
Hleft[0] = 0
|
||||||
|
for off, char := range Tsub {
|
||||||
|
col := off + f
|
||||||
var s1, s2, consecutive int16
|
var s1, s2, consecutive int16
|
||||||
|
|
||||||
if j > int(F[i]) {
|
if inGap {
|
||||||
if inGap {
|
s2 = Hleft[off] + scoreGapExtension
|
||||||
s2 = H[I+j0-1] + scoreGapExtention
|
} else {
|
||||||
} else {
|
s2 = Hleft[off] + scoreGapStart
|
||||||
s2 = H[I+j0-1] + scoreGapStart
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if pattern[i] == T[j] {
|
if pchar == char {
|
||||||
var diag int16
|
s1 = Hdiag[off] + scoreMatch
|
||||||
if i > 0 && j0 > 0 {
|
b := Bsub[off]
|
||||||
diag = H[I-width+j0-1]
|
consecutive = Cdiag[off] + 1
|
||||||
}
|
if consecutive > 1 {
|
||||||
s1 = diag + scoreMatch
|
fb := B[col-int(consecutive)+1]
|
||||||
b := B[j]
|
|
||||||
if i > 0 {
|
|
||||||
// j > 0 if i > 0
|
|
||||||
consecutive = C[I-width+j0-1] + 1
|
|
||||||
// Break consecutive chunk
|
// Break consecutive chunk
|
||||||
if b == bonusBoundary {
|
if b >= bonusBoundary && b > fb {
|
||||||
consecutive = 1
|
consecutive = 1
|
||||||
} else if consecutive > 1 {
|
} else {
|
||||||
b = util.Max16(b, util.Max16(bonusConsecutive, B[j-int(consecutive)+1]))
|
b = util.Max16(b, util.Max16(bonusConsecutive, fb))
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
consecutive = 1
|
|
||||||
b *= bonusFirstCharMultiplier
|
|
||||||
}
|
}
|
||||||
if s1+b < s2 {
|
if s1+b < s2 {
|
||||||
s1 += B[j]
|
s1 += Bsub[off]
|
||||||
consecutive = 0
|
consecutive = 0
|
||||||
} else {
|
} else {
|
||||||
s1 += b
|
s1 += b
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
C[I+j0] = consecutive
|
Csub[off] = consecutive
|
||||||
|
|
||||||
inGap = s1 < s2
|
inGap = s1 < s2
|
||||||
score := util.Max16(util.Max16(s1, s2), 0)
|
score := util.Max16(util.Max16(s1, s2), 0)
|
||||||
if i == M-1 && (forward && score > maxScore || !forward && score >= maxScore) {
|
if pidx == M-1 && (forward && score > maxScore || !forward && score >= maxScore) {
|
||||||
maxScore, maxScorePos = score, j
|
maxScore, maxScorePos = score, col
|
||||||
}
|
}
|
||||||
H[I+j0] = score
|
Hsub[off] = score
|
||||||
}
|
|
||||||
|
|
||||||
if DEBUG {
|
|
||||||
if i == 0 {
|
|
||||||
fmt.Print(" ")
|
|
||||||
for j := int(F[i]); j <= lastIdx; j++ {
|
|
||||||
fmt.Printf(" " + string(input.Get(j)) + " ")
|
|
||||||
}
|
|
||||||
fmt.Println()
|
|
||||||
}
|
|
||||||
fmt.Print(string(pattern[i]) + " ")
|
|
||||||
for idx := int(F[0]); idx < int(F[i]); idx++ {
|
|
||||||
fmt.Print(" 0 ")
|
|
||||||
}
|
|
||||||
for idx := int(F[i]); idx <= lastIdx; idx++ {
|
|
||||||
fmt.Printf("%2d ", H[i*width+idx-int(F[0])])
|
|
||||||
}
|
|
||||||
fmt.Println()
|
|
||||||
|
|
||||||
fmt.Print(" ")
|
|
||||||
for idx, p := range C[I : I+width] {
|
|
||||||
if idx+int(F[0]) < int(F[i]) {
|
|
||||||
p = 0
|
|
||||||
}
|
|
||||||
fmt.Printf("%2d ", p)
|
|
||||||
}
|
|
||||||
fmt.Println()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Phase 3. (Optional) Backtrace to find character positions
|
if DEBUG {
|
||||||
|
debugV2(T, pattern, F, lastIdx, H, C)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 4. (Optional) Backtrace to find character positions
|
||||||
pos := posArray(withPos, M)
|
pos := posArray(withPos, M)
|
||||||
j := int(F[0])
|
j := f0
|
||||||
if withPos {
|
if withPos {
|
||||||
i := M - 1
|
i := M - 1
|
||||||
j = maxScorePos
|
j = maxScorePos
|
||||||
preferMatch := true
|
preferMatch := true
|
||||||
for {
|
for {
|
||||||
I := i * width
|
I := i * width
|
||||||
j0 := j - int(F[0])
|
j0 := j - f0
|
||||||
s := H[I+j0]
|
s := H[I+j0]
|
||||||
|
|
||||||
var s1, s2 int16
|
var s1, s2 int16
|
||||||
@@ -434,7 +633,7 @@ func FuzzyMatchV2(caseSensitive bool, normalize bool, forward bool, input util.C
|
|||||||
}
|
}
|
||||||
|
|
||||||
if s > s1 && (s > s2 || s == s2 && preferMatch) {
|
if s > s1 && (s > s2 || s == s2 && preferMatch) {
|
||||||
*pos = append(*pos, j)
|
*pos = append(*pos, j+minIdx)
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
@@ -447,14 +646,14 @@ func FuzzyMatchV2(caseSensitive bool, normalize bool, forward bool, input util.C
|
|||||||
// Start offset we return here is only relevant when begin tiebreak is used.
|
// Start offset we return here is only relevant when begin tiebreak is used.
|
||||||
// However finding the accurate offset requires backtracking, and we don't
|
// However finding the accurate offset requires backtracking, and we don't
|
||||||
// want to pay extra cost for the option that has lost its importance.
|
// want to pay extra cost for the option that has lost its importance.
|
||||||
return Result{j, maxScorePos + 1, int(maxScore)}, pos
|
return Result{minIdx + j, minIdx + maxScorePos + 1, int(maxScore)}, pos
|
||||||
}
|
}
|
||||||
|
|
||||||
// Implement the same sorting criteria as V2
|
// Implement the same sorting criteria as V2
|
||||||
func calculateScore(caseSensitive bool, normalize bool, text util.Chars, pattern []rune, sidx int, eidx int, withPos bool) (int, *[]int) {
|
func calculateScore(caseSensitive bool, normalize bool, text *util.Chars, pattern []rune, sidx int, eidx int, withPos bool) (int, *[]int) {
|
||||||
pidx, score, inGap, consecutive, firstBonus := 0, 0, false, 0, int16(0)
|
pidx, score, inGap, consecutive, firstBonus := 0, 0, false, 0, int16(0)
|
||||||
pos := posArray(withPos, len(pattern))
|
pos := posArray(withPos, len(pattern))
|
||||||
prevClass := charNonWord
|
prevClass := initialCharClass
|
||||||
if sidx > 0 {
|
if sidx > 0 {
|
||||||
prevClass = charClassOf(text.Get(sidx - 1))
|
prevClass = charClassOf(text.Get(sidx - 1))
|
||||||
}
|
}
|
||||||
@@ -477,12 +676,12 @@ func calculateScore(caseSensitive bool, normalize bool, text util.Chars, pattern
|
|||||||
*pos = append(*pos, idx)
|
*pos = append(*pos, idx)
|
||||||
}
|
}
|
||||||
score += scoreMatch
|
score += scoreMatch
|
||||||
bonus := bonusFor(prevClass, class)
|
bonus := bonusMatrix[prevClass][class]
|
||||||
if consecutive == 0 {
|
if consecutive == 0 {
|
||||||
firstBonus = bonus
|
firstBonus = bonus
|
||||||
} else {
|
} else {
|
||||||
// Break consecutive chunk
|
// Break consecutive chunk
|
||||||
if bonus == bonusBoundary {
|
if bonus >= bonusBoundary && bonus > firstBonus {
|
||||||
firstBonus = bonus
|
firstBonus = bonus
|
||||||
}
|
}
|
||||||
bonus = util.Max16(util.Max16(bonus, firstBonus), bonusConsecutive)
|
bonus = util.Max16(util.Max16(bonus, firstBonus), bonusConsecutive)
|
||||||
@@ -497,7 +696,7 @@ func calculateScore(caseSensitive bool, normalize bool, text util.Chars, pattern
|
|||||||
pidx++
|
pidx++
|
||||||
} else {
|
} else {
|
||||||
if inGap {
|
if inGap {
|
||||||
score += scoreGapExtention
|
score += scoreGapExtension
|
||||||
} else {
|
} else {
|
||||||
score += scoreGapStart
|
score += scoreGapStart
|
||||||
}
|
}
|
||||||
@@ -511,10 +710,14 @@ func calculateScore(caseSensitive bool, normalize bool, text util.Chars, pattern
|
|||||||
}
|
}
|
||||||
|
|
||||||
// FuzzyMatchV1 performs fuzzy-match
|
// FuzzyMatchV1 performs fuzzy-match
|
||||||
func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||||
if len(pattern) == 0 {
|
if len(pattern) == 0 {
|
||||||
return Result{0, 0, 0}, nil
|
return Result{0, 0, 0}, nil
|
||||||
}
|
}
|
||||||
|
idx, _ := asciiFuzzyIndex(text, pattern, caseSensitive)
|
||||||
|
if idx < 0 {
|
||||||
|
return Result{-1, -1, 0}, nil
|
||||||
|
}
|
||||||
|
|
||||||
pidx := 0
|
pidx := 0
|
||||||
sidx := -1
|
sidx := -1
|
||||||
@@ -564,6 +767,9 @@ func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text util.Ch
                 char = unicode.To(unicode.LowerCase, char)
             }
         }
+        if normalize {
+            char = normalizeRune(char)
+        }

         pidx_ := indexAt(pidx, lenPattern, forward)
         pchar := pattern[pidx_]
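FuzzyMatchV1 now normalizes each input rune before comparing it, using the same normalizeRune table consulted elsewhere in the package. A tiny sketch of that lookup with a two-entry excerpt of the table; the full map lives in normalize.go:

package main

import "fmt"

// normalizedSample is a two-entry excerpt of the normalization table: accented
// (and, in the real table, fullwidth) code points map to plain ASCII letters.
var normalizedSample = map[rune]rune{
    'ó': 'o',
    'ç': 'c',
}

// normalizeRune returns the plain counterpart if one is known, else the rune
// unchanged (the real function also range-checks before consulting the map).
func normalizeRune(r rune) rune {
    if n, ok := normalizedSample[r]; ok {
        return n
    }
    return r
}

func main() {
    for _, r := range "Dançó" {
        fmt.Printf("%c", normalizeRune(r))
    }
    fmt.Println() // Danco
}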
@@ -594,7 +800,15 @@ func FuzzyMatchV1(caseSensitive bool, normalize bool, forward bool, text util.Ch
|
|||||||
// bonus point, instead of stopping immediately after finding the first match.
|
// bonus point, instead of stopping immediately after finding the first match.
|
||||||
// The solution is much cheaper since there is only one possible alignment of
|
// The solution is much cheaper since there is only one possible alignment of
|
||||||
// the pattern.
|
// the pattern.
|
||||||
func ExactMatchNaive(caseSensitive bool, normalize bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
func ExactMatchNaive(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||||
|
return exactMatchNaive(caseSensitive, normalize, forward, false, text, pattern, withPos, slab)
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExactMatchBoundary(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||||
|
return exactMatchNaive(caseSensitive, normalize, forward, true, text, pattern, withPos, slab)
|
||||||
|
}
|
||||||
|
|
||||||
|
func exactMatchNaive(caseSensitive bool, normalize bool, forward bool, boundaryCheck bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||||
if len(pattern) == 0 {
|
if len(pattern) == 0 {
|
||||||
return Result{0, 0, 0}, nil
|
return Result{0, 0, 0}, nil
|
||||||
}
|
}
|
||||||
@@ -606,9 +820,14 @@ func ExactMatchNaive(caseSensitive bool, normalize bool, forward bool, text util
|
|||||||
return Result{-1, -1, 0}, nil
|
return Result{-1, -1, 0}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
idx, _ := asciiFuzzyIndex(text, pattern, caseSensitive)
|
||||||
|
if idx < 0 {
|
||||||
|
return Result{-1, -1, 0}, nil
|
||||||
|
}
|
||||||
|
|
||||||
// For simplicity, only look at the bonus at the first character position
|
// For simplicity, only look at the bonus at the first character position
|
||||||
pidx := 0
|
pidx := 0
|
||||||
bestPos, bonus, bestBonus := -1, int16(0), int16(-1)
|
bestPos, bonus, bbonus, bestBonus := -1, int16(0), int16(0), int16(-1)
|
||||||
for index := 0; index < lenRunes; index++ {
|
for index := 0; index < lenRunes; index++ {
|
||||||
index_ := indexAt(index, lenRunes, forward)
|
index_ := indexAt(index, lenRunes, forward)
|
||||||
char := text.Get(index_)
|
char := text.Get(index_)
|
||||||
@@ -624,16 +843,37 @@ func ExactMatchNaive(caseSensitive bool, normalize bool, forward bool, text util
|
|||||||
}
|
}
|
||||||
pidx_ := indexAt(pidx, lenPattern, forward)
|
pidx_ := indexAt(pidx, lenPattern, forward)
|
||||||
pchar := pattern[pidx_]
|
pchar := pattern[pidx_]
|
||||||
if pchar == char {
|
ok := pchar == char
|
||||||
|
if ok {
|
||||||
if pidx_ == 0 {
|
if pidx_ == 0 {
|
||||||
bonus = bonusAt(text, index_)
|
bonus = bonusAt(text, index_)
|
||||||
}
|
}
|
||||||
|
if boundaryCheck {
|
||||||
|
if forward && pidx_ == 0 {
|
||||||
|
bbonus = bonus
|
||||||
|
} else if !forward && pidx_ == lenPattern-1 {
|
||||||
|
if index_ < lenRunes-1 {
|
||||||
|
bbonus = bonusAt(text, index_+1)
|
||||||
|
} else {
|
||||||
|
bbonus = bonusBoundaryWhite
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ok = bbonus >= bonusBoundary
|
||||||
|
if ok && pidx_ == 0 {
|
||||||
|
ok = index_ == 0 || charClassOf(text.Get(index_-1)) <= charDelimiter
|
||||||
|
}
|
||||||
|
if ok && pidx_ == len(pattern)-1 {
|
||||||
|
ok = index_ == lenRunes-1 || charClassOf(text.Get(index_+1)) <= charDelimiter
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ok {
|
||||||
pidx++
|
pidx++
|
||||||
if pidx == lenPattern {
|
if pidx == lenPattern {
|
||||||
if bonus > bestBonus {
|
if bonus > bestBonus {
|
||||||
bestPos, bestBonus = index, bonus
|
bestPos, bestBonus = index, bonus
|
||||||
}
|
}
|
||||||
if bonus == bonusBoundary {
|
if bonus >= bonusBoundary {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
index -= pidx - 1
|
index -= pidx - 1
|
||||||
@@ -653,24 +893,45 @@ func ExactMatchNaive(caseSensitive bool, normalize bool, forward bool, text util
|
|||||||
sidx = lenRunes - (bestPos + 1)
|
sidx = lenRunes - (bestPos + 1)
|
||||||
eidx = lenRunes - (bestPos - lenPattern + 1)
|
eidx = lenRunes - (bestPos - lenPattern + 1)
|
||||||
}
|
}
|
||||||
score, _ := calculateScore(caseSensitive, normalize, text, pattern, sidx, eidx, false)
|
var score int
|
||||||
|
if boundaryCheck {
|
||||||
|
// Underscore boundaries should be ranked lower than the other types of boundaries
|
||||||
|
score = int(bonus)
|
||||||
|
deduct := int(bonus-bonusBoundary) + 1
|
||||||
|
if sidx > 0 && text.Get(sidx-1) == '_' {
|
||||||
|
score -= deduct + 1
|
||||||
|
deduct = 1
|
||||||
|
}
|
||||||
|
if eidx < lenRunes && text.Get(eidx) == '_' {
|
||||||
|
score -= deduct
|
||||||
|
}
|
||||||
|
// Add base score so that this can compete with other match types e.g. 'foo' | bar
|
||||||
|
score += scoreMatch*lenPattern + int(bonusBoundaryWhite)*(lenPattern+1)
|
||||||
|
} else {
|
||||||
|
score, _ = calculateScore(caseSensitive, normalize, text, pattern, sidx, eidx, false)
|
||||||
|
}
|
||||||
return Result{sidx, eidx, score}, nil
|
return Result{sidx, eidx, score}, nil
|
||||||
}
|
}
|
||||||
return Result{-1, -1, 0}, nil
|
return Result{-1, -1, 0}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// PrefixMatch performs prefix-match
|
// PrefixMatch performs prefix-match
|
||||||
func PrefixMatch(caseSensitive bool, normalize bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
func PrefixMatch(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||||
if len(pattern) == 0 {
|
if len(pattern) == 0 {
|
||||||
return Result{0, 0, 0}, nil
|
return Result{0, 0, 0}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if text.Length() < len(pattern) {
|
trimmedLen := 0
|
||||||
|
if !unicode.IsSpace(pattern[0]) {
|
||||||
|
trimmedLen = text.LeadingWhitespaces()
|
||||||
|
}
|
||||||
|
|
||||||
|
if text.Length()-trimmedLen < len(pattern) {
|
||||||
return Result{-1, -1, 0}, nil
|
return Result{-1, -1, 0}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
for index, r := range pattern {
|
for index, r := range pattern {
|
||||||
char := text.Get(index)
|
char := text.Get(trimmedLen + index)
|
||||||
if !caseSensitive {
|
if !caseSensitive {
|
||||||
char = unicode.ToLower(char)
|
char = unicode.ToLower(char)
|
||||||
}
|
}
|
||||||
@@ -682,14 +943,17 @@ func PrefixMatch(caseSensitive bool, normalize bool, forward bool, text util.Cha
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
lenPattern := len(pattern)
|
lenPattern := len(pattern)
|
||||||
score, _ := calculateScore(caseSensitive, normalize, text, pattern, 0, lenPattern, false)
|
score, _ := calculateScore(caseSensitive, normalize, text, pattern, trimmedLen, trimmedLen+lenPattern, false)
|
||||||
return Result{0, lenPattern, score}, nil
|
return Result{trimmedLen, trimmedLen + lenPattern, score}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// SuffixMatch performs suffix-match
|
// SuffixMatch performs suffix-match
|
||||||
func SuffixMatch(caseSensitive bool, normalize bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
func SuffixMatch(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||||
lenRunes := text.Length()
|
lenRunes := text.Length()
|
||||||
trimmedLen := lenRunes - text.TrailingWhitespaces()
|
trimmedLen := lenRunes
|
||||||
|
if len(pattern) == 0 || !unicode.IsSpace(pattern[len(pattern)-1]) {
|
||||||
|
trimmedLen -= text.TrailingWhitespaces()
|
||||||
|
}
|
||||||
if len(pattern) == 0 {
|
if len(pattern) == 0 {
|
||||||
return Result{trimmedLen, trimmedLen, 0}, nil
|
return Result{trimmedLen, trimmedLen, 0}, nil
|
||||||
}
|
}
|
||||||
@@ -718,16 +982,32 @@ func SuffixMatch(caseSensitive bool, normalize bool, forward bool, text util.Cha
|
|||||||
}
|
}
|
||||||
|
|
||||||
// EqualMatch performs equal-match
|
// EqualMatch performs equal-match
|
||||||
func EqualMatch(caseSensitive bool, normalize bool, forward bool, text util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
func EqualMatch(caseSensitive bool, normalize bool, forward bool, text *util.Chars, pattern []rune, withPos bool, slab *util.Slab) (Result, *[]int) {
|
||||||
lenPattern := len(pattern)
|
lenPattern := len(pattern)
|
||||||
if text.Length() != lenPattern {
|
if lenPattern == 0 {
|
||||||
|
return Result{-1, -1, 0}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Strip leading whitespaces
|
||||||
|
trimmedLen := 0
|
||||||
|
if !unicode.IsSpace(pattern[0]) {
|
||||||
|
trimmedLen = text.LeadingWhitespaces()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Strip trailing whitespaces
|
||||||
|
trimmedEndLen := 0
|
||||||
|
if !unicode.IsSpace(pattern[lenPattern-1]) {
|
||||||
|
trimmedEndLen = text.TrailingWhitespaces()
|
||||||
|
}
|
||||||
|
|
||||||
|
if text.Length()-trimmedLen-trimmedEndLen != lenPattern {
|
||||||
return Result{-1, -1, 0}, nil
|
return Result{-1, -1, 0}, nil
|
||||||
}
|
}
|
||||||
match := true
|
match := true
|
||||||
if normalize {
|
if normalize {
|
||||||
runes := text.ToRunes()
|
runes := text.ToRunes()
|
||||||
for idx, pchar := range pattern {
|
for idx, pchar := range pattern {
|
||||||
char := runes[idx]
|
char := runes[trimmedLen+idx]
|
||||||
if !caseSensitive {
|
if !caseSensitive {
|
||||||
char = unicode.To(unicode.LowerCase, char)
|
char = unicode.To(unicode.LowerCase, char)
|
||||||
}
|
}
|
||||||
@@ -737,15 +1017,16 @@ func EqualMatch(caseSensitive bool, normalize bool, forward bool, text util.Char
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
runesStr := text.ToString()
|
runes := text.ToRunes()
|
||||||
|
runesStr := string(runes[trimmedLen : len(runes)-trimmedEndLen])
|
||||||
if !caseSensitive {
|
if !caseSensitive {
|
||||||
runesStr = strings.ToLower(runesStr)
|
runesStr = strings.ToLower(runesStr)
|
||||||
}
|
}
|
||||||
match = runesStr == string(pattern)
|
match = runesStr == string(pattern)
|
||||||
}
|
}
|
||||||
if match {
|
if match {
|
||||||
return Result{0, lenPattern, (scoreMatch+bonusBoundary)*lenPattern +
|
return Result{trimmedLen, trimmedLen + lenPattern, (scoreMatch+int(bonusBoundaryWhite))*lenPattern +
|
||||||
(bonusFirstCharMultiplier-1)*bonusBoundary}, nil
|
(bonusFirstCharMultiplier-1)*int(bonusBoundaryWhite)}, nil
|
||||||
}
|
}
|
||||||
return Result{-1, -1, 0}, nil
|
return Result{-1, -1, 0}, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,6 +9,10 @@ import (
|
|||||||
"github.com/junegunn/fzf/src/util"
|
"github.com/junegunn/fzf/src/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
Init("default")
|
||||||
|
}
|
||||||
|
|
||||||
func assertMatch(t *testing.T, fun Algo, caseSensitive, forward bool, input, pattern string, sidx int, eidx int, score int) {
|
func assertMatch(t *testing.T, fun Algo, caseSensitive, forward bool, input, pattern string, sidx int, eidx int, score int) {
|
||||||
assertMatch2(t, fun, caseSensitive, false, forward, input, pattern, sidx, eidx, score)
|
assertMatch2(t, fun, caseSensitive, false, forward, input, pattern, sidx, eidx, score)
|
||||||
}
|
}
|
||||||
@@ -17,7 +21,8 @@ func assertMatch2(t *testing.T, fun Algo, caseSensitive, normalize, forward bool
|
|||||||
if !caseSensitive {
|
if !caseSensitive {
|
||||||
pattern = strings.ToLower(pattern)
|
pattern = strings.ToLower(pattern)
|
||||||
}
|
}
|
||||||
res, pos := fun(caseSensitive, normalize, forward, util.RunesToChars([]rune(input)), []rune(pattern), true, nil)
|
chars := util.ToChars([]byte(input))
|
||||||
|
res, pos := fun(caseSensitive, normalize, forward, &chars, []rune(pattern), true, nil)
|
||||||
var start, end int
|
var start, end int
|
||||||
if pos == nil || len(*pos) == 0 {
|
if pos == nil || len(*pos) == 0 {
|
||||||
start = res.Start
|
start = res.Start
|
||||||
@@ -42,46 +47,46 @@ func TestFuzzyMatch(t *testing.T) {
|
|||||||
for _, fn := range []Algo{FuzzyMatchV1, FuzzyMatchV2} {
|
for _, fn := range []Algo{FuzzyMatchV1, FuzzyMatchV2} {
|
||||||
for _, forward := range []bool{true, false} {
|
for _, forward := range []bool{true, false} {
|
||||||
assertMatch(t, fn, false, forward, "fooBarbaz1", "oBZ", 2, 9,
|
assertMatch(t, fn, false, forward, "fooBarbaz1", "oBZ", 2, 9,
|
||||||
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtention*3)
|
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtension*3)
|
||||||
assertMatch(t, fn, false, forward, "foo bar baz", "fbb", 0, 9,
|
assertMatch(t, fn, false, forward, "foo bar baz", "fbb", 0, 9,
|
||||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+
|
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||||
bonusBoundary*2+2*scoreGapStart+4*scoreGapExtention)
|
int(bonusBoundaryWhite)*2+2*scoreGapStart+4*scoreGapExtension)
|
||||||
assertMatch(t, fn, false, forward, "/AutomatorDocument.icns", "rdoc", 9, 13,
|
assertMatch(t, fn, false, forward, "/AutomatorDocument.icns", "rdoc", 9, 13,
|
||||||
scoreMatch*4+bonusCamel123+bonusConsecutive*2)
|
scoreMatch*4+bonusCamel123+bonusConsecutive*2)
|
||||||
assertMatch(t, fn, false, forward, "/man1/zshcompctl.1", "zshc", 6, 10,
|
assertMatch(t, fn, false, forward, "/man1/zshcompctl.1", "zshc", 6, 10,
|
||||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*3)
|
scoreMatch*4+int(bonusBoundaryDelimiter)*bonusFirstCharMultiplier+int(bonusBoundaryDelimiter)*3)
|
||||||
assertMatch(t, fn, false, forward, "/.oh-my-zsh/cache", "zshc", 8, 13,
|
assertMatch(t, fn, false, forward, "/.oh-my-zsh/cache", "zshc", 8, 13,
|
||||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*3+scoreGapStart)
|
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*2+scoreGapStart+int(bonusBoundaryDelimiter))
|
||||||
assertMatch(t, fn, false, forward, "ab0123 456", "12356", 3, 10,
|
assertMatch(t, fn, false, forward, "ab0123 456", "12356", 3, 10,
|
||||||
scoreMatch*5+bonusConsecutive*3+scoreGapStart+scoreGapExtention)
|
scoreMatch*5+bonusConsecutive*3+scoreGapStart+scoreGapExtension)
|
||||||
assertMatch(t, fn, false, forward, "abc123 456", "12356", 3, 10,
|
assertMatch(t, fn, false, forward, "abc123 456", "12356", 3, 10,
|
||||||
scoreMatch*5+bonusCamel123*bonusFirstCharMultiplier+bonusCamel123*2+bonusConsecutive+scoreGapStart+scoreGapExtention)
|
scoreMatch*5+bonusCamel123*bonusFirstCharMultiplier+bonusCamel123*2+bonusConsecutive+scoreGapStart+scoreGapExtension)
|
||||||
assertMatch(t, fn, false, forward, "foo/bar/baz", "fbb", 0, 9,
|
assertMatch(t, fn, false, forward, "foo/bar/baz", "fbb", 0, 9,
|
||||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+
|
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||||
bonusBoundary*2+2*scoreGapStart+4*scoreGapExtention)
|
int(bonusBoundaryDelimiter)*2+2*scoreGapStart+4*scoreGapExtension)
|
||||||
assertMatch(t, fn, false, forward, "fooBarBaz", "fbb", 0, 7,
|
assertMatch(t, fn, false, forward, "fooBarBaz", "fbb", 0, 7,
|
||||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+
|
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||||
bonusCamel123*2+2*scoreGapStart+2*scoreGapExtention)
|
bonusCamel123*2+2*scoreGapStart+2*scoreGapExtension)
|
||||||
assertMatch(t, fn, false, forward, "foo barbaz", "fbb", 0, 8,
|
assertMatch(t, fn, false, forward, "foo barbaz", "fbb", 0, 8,
|
||||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary+
|
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite)+
|
||||||
scoreGapStart*2+scoreGapExtention*3)
|
scoreGapStart*2+scoreGapExtension*3)
|
||||||
assertMatch(t, fn, false, forward, "fooBar Baz", "foob", 0, 4,
|
assertMatch(t, fn, false, forward, "fooBar Baz", "foob", 0, 4,
|
||||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*3)
|
scoreMatch*4+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite)*3)
|
||||||
assertMatch(t, fn, false, forward, "xFoo-Bar Baz", "foo-b", 1, 6,
|
assertMatch(t, fn, false, forward, "xFoo-Bar Baz", "foo-b", 1, 6,
|
||||||
scoreMatch*5+bonusCamel123*bonusFirstCharMultiplier+bonusCamel123*2+
|
scoreMatch*5+bonusCamel123*bonusFirstCharMultiplier+bonusCamel123*2+
|
||||||
bonusNonWord+bonusBoundary)
|
bonusNonWord+bonusBoundary)
|
||||||
|
|
||||||
assertMatch(t, fn, true, forward, "fooBarbaz", "oBz", 2, 9,
|
assertMatch(t, fn, true, forward, "fooBarbaz", "oBz", 2, 9,
|
||||||
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtention*3)
|
scoreMatch*3+bonusCamel123+scoreGapStart+scoreGapExtension*3)
|
||||||
assertMatch(t, fn, true, forward, "Foo/Bar/Baz", "FBB", 0, 9,
|
assertMatch(t, fn, true, forward, "Foo/Bar/Baz", "FBB", 0, 9,
|
||||||
scoreMatch*3+bonusBoundary*(bonusFirstCharMultiplier+2)+
|
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryDelimiter)*2+
|
||||||
scoreGapStart*2+scoreGapExtention*4)
|
scoreGapStart*2+scoreGapExtension*4)
|
||||||
assertMatch(t, fn, true, forward, "FooBarBaz", "FBB", 0, 7,
|
assertMatch(t, fn, true, forward, "FooBarBaz", "FBB", 0, 7,
|
||||||
scoreMatch*3+bonusBoundary*bonusFirstCharMultiplier+bonusCamel123*2+
|
scoreMatch*3+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+bonusCamel123*2+
|
||||||
scoreGapStart*2+scoreGapExtention*2)
|
scoreGapStart*2+scoreGapExtension*2)
|
||||||
assertMatch(t, fn, true, forward, "FooBar Baz", "FooB", 0, 4,
|
assertMatch(t, fn, true, forward, "FooBar Baz", "FooB", 0, 4,
|
||||||
scoreMatch*4+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary*2+
|
scoreMatch*4+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite)*2+
|
||||||
util.Max(bonusCamel123, bonusBoundary))
|
util.Max(bonusCamel123, int(bonusBoundaryWhite)))
|
||||||
|
|
||||||
// Consecutive bonus updated
|
// Consecutive bonus updated
|
||||||
assertMatch(t, fn, true, forward, "foo-bar", "o-ba", 2, 6,
|
assertMatch(t, fn, true, forward, "foo-bar", "o-ba", 2, 6,
|
||||||
@@ -97,10 +102,10 @@ func TestFuzzyMatch(t *testing.T) {
|
|||||||
|
|
||||||
func TestFuzzyMatchBackward(t *testing.T) {
|
func TestFuzzyMatchBackward(t *testing.T) {
|
||||||
assertMatch(t, FuzzyMatchV1, false, true, "foobar fb", "fb", 0, 4,
|
assertMatch(t, FuzzyMatchV1, false, true, "foobar fb", "fb", 0, 4,
|
||||||
scoreMatch*2+bonusBoundary*bonusFirstCharMultiplier+
|
scoreMatch*2+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+
|
||||||
scoreGapStart+scoreGapExtention)
|
scoreGapStart+scoreGapExtension)
|
||||||
assertMatch(t, FuzzyMatchV1, false, false, "foobar fb", "fb", 7, 9,
|
assertMatch(t, FuzzyMatchV1, false, false, "foobar fb", "fb", 7, 9,
|
||||||
scoreMatch*2+bonusBoundary*bonusFirstCharMultiplier+bonusBoundary)
|
scoreMatch*2+int(bonusBoundaryWhite)*bonusFirstCharMultiplier+int(bonusBoundaryWhite))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExactMatchNaive(t *testing.T) {
|
func TestExactMatchNaive(t *testing.T) {
|
||||||
@@ -113,9 +118,9 @@ func TestExactMatchNaive(t *testing.T) {
|
|||||||
assertMatch(t, ExactMatchNaive, false, dir, "/AutomatorDocument.icns", "rdoc", 9, 13,
|
assertMatch(t, ExactMatchNaive, false, dir, "/AutomatorDocument.icns", "rdoc", 9, 13,
|
||||||
scoreMatch*4+bonusCamel123+bonusConsecutive*2)
|
scoreMatch*4+bonusCamel123+bonusConsecutive*2)
|
||||||
assertMatch(t, ExactMatchNaive, false, dir, "/man1/zshcompctl.1", "zshc", 6, 10,
|
assertMatch(t, ExactMatchNaive, false, dir, "/man1/zshcompctl.1", "zshc", 6, 10,
|
||||||
scoreMatch*4+bonusBoundary*(bonusFirstCharMultiplier+3))
|
scoreMatch*4+int(bonusBoundaryDelimiter)*(bonusFirstCharMultiplier+3))
|
||||||
assertMatch(t, ExactMatchNaive, false, dir, "/.oh-my-zsh/cache", "zsh/c", 8, 13,
|
assertMatch(t, ExactMatchNaive, false, dir, "/.oh-my-zsh/cache", "zsh/c", 8, 13,
|
||||||
scoreMatch*5+bonusBoundary*(bonusFirstCharMultiplier+4))
|
scoreMatch*5+bonusBoundary*(bonusFirstCharMultiplier+3)+int(bonusBoundaryDelimiter))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -127,7 +132,7 @@ func TestExactMatchNaiveBackward(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestPrefixMatch(t *testing.T) {
|
func TestPrefixMatch(t *testing.T) {
|
||||||
score := (scoreMatch+bonusBoundary)*3 + bonusBoundary*(bonusFirstCharMultiplier-1)
|
score := scoreMatch*3 + int(bonusBoundaryWhite)*bonusFirstCharMultiplier + int(bonusBoundaryWhite)*2
|
||||||
|
|
||||||
for _, dir := range []bool{true, false} {
|
for _, dir := range []bool{true, false} {
|
||||||
assertMatch(t, PrefixMatch, true, dir, "fooBarbaz", "Foo", -1, -1, 0)
|
assertMatch(t, PrefixMatch, true, dir, "fooBarbaz", "Foo", -1, -1, 0)
|
||||||
@@ -135,6 +140,10 @@ func TestPrefixMatch(t *testing.T) {
|
|||||||
assertMatch(t, PrefixMatch, false, dir, "fooBarbaz", "Foo", 0, 3, score)
|
assertMatch(t, PrefixMatch, false, dir, "fooBarbaz", "Foo", 0, 3, score)
|
||||||
assertMatch(t, PrefixMatch, false, dir, "foOBarBaZ", "foo", 0, 3, score)
|
assertMatch(t, PrefixMatch, false, dir, "foOBarBaZ", "foo", 0, 3, score)
|
||||||
assertMatch(t, PrefixMatch, false, dir, "f-oBarbaz", "f-o", 0, 3, score)
|
assertMatch(t, PrefixMatch, false, dir, "f-oBarbaz", "f-o", 0, 3, score)
|
||||||
|
|
||||||
|
assertMatch(t, PrefixMatch, false, dir, " fooBar", "foo", 1, 4, score)
|
||||||
|
assertMatch(t, PrefixMatch, false, dir, " fooBar", " fo", 0, 3, score)
|
||||||
|
assertMatch(t, PrefixMatch, false, dir, " fo", "foo", -1, -1, 0)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -147,6 +156,14 @@ func TestSuffixMatch(t *testing.T) {
|
|||||||
scoreMatch*3+bonusConsecutive*2)
|
scoreMatch*3+bonusConsecutive*2)
|
||||||
assertMatch(t, SuffixMatch, false, dir, "fooBarBaZ", "baz", 6, 9,
|
assertMatch(t, SuffixMatch, false, dir, "fooBarBaZ", "baz", 6, 9,
|
||||||
(scoreMatch+bonusCamel123)*3+bonusCamel123*(bonusFirstCharMultiplier-1))
|
(scoreMatch+bonusCamel123)*3+bonusCamel123*(bonusFirstCharMultiplier-1))
|
||||||
|
|
||||||
|
// Strip trailing white space from the string
|
||||||
|
assertMatch(t, SuffixMatch, false, dir, "fooBarbaz ", "baz", 6, 9,
|
||||||
|
scoreMatch*3+bonusConsecutive*2)
|
||||||
|
|
||||||
|
// Only when the pattern doesn't end with a space
|
||||||
|
assertMatch(t, SuffixMatch, false, dir, "fooBarbaz ", "baz ", 6, 10,
|
||||||
|
scoreMatch*4+bonusConsecutive*2+int(bonusBoundaryWhite))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -170,9 +187,9 @@ func TestNormalize(t *testing.T) {
|
|||||||
input, pattern, sidx, eidx, score)
|
input, pattern, sidx, eidx, score)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
test("Só Danço Samba", "So", 0, 2, 56, FuzzyMatchV1, FuzzyMatchV2, PrefixMatch, ExactMatchNaive)
|
test("Só Danço Samba", "So", 0, 2, 62, FuzzyMatchV1, FuzzyMatchV2, PrefixMatch, ExactMatchNaive)
|
||||||
test("Só Danço Samba", "sodc", 0, 7, 89, FuzzyMatchV1, FuzzyMatchV2)
|
test("Só Danço Samba", "sodc", 0, 7, 97, FuzzyMatchV1, FuzzyMatchV2)
|
||||||
test("Danço", "danco", 0, 5, 128, FuzzyMatchV1, FuzzyMatchV2, PrefixMatch, SuffixMatch, ExactMatchNaive, EqualMatch)
|
test("Danço", "danco", 0, 5, 140, FuzzyMatchV1, FuzzyMatchV2, PrefixMatch, SuffixMatch, ExactMatchNaive, EqualMatch)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLongString(t *testing.T) {
|
func TestLongString(t *testing.T) {
|
||||||
@@ -183,3 +200,12 @@ func TestLongString(t *testing.T) {
|
|||||||
bytes[math.MaxUint16] = 'z'
|
bytes[math.MaxUint16] = 'z'
|
||||||
assertMatch(t, FuzzyMatchV2, true, true, string(bytes), "zx", math.MaxUint16, math.MaxUint16+2, scoreMatch*2+bonusConsecutive)
|
assertMatch(t, FuzzyMatchV2, true, true, string(bytes), "zx", math.MaxUint16, math.MaxUint16+2, scoreMatch*2+bonusConsecutive)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestLongStringWithNormalize(t *testing.T) {
|
||||||
|
bytes := make([]byte, 30000)
|
||||||
|
for i := range bytes {
|
||||||
|
bytes[i] = 'x'
|
||||||
|
}
|
||||||
|
unicodeString := string(bytes) + " Minímal example"
|
||||||
|
assertMatch2(t, FuzzyMatchV1, false, true, false, unicodeString, "minim", 30001, 30006, 140)
|
||||||
|
}
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
package algo
|
package algo
|
||||||
|
|
||||||
var normalized map[rune]rune = map[rune]rune{
|
var normalized = map[rune]rune{
|
||||||
0x00E1: 'a', // WITH ACUTE, LATIN SMALL LETTER
|
0x00E1: 'a', // WITH ACUTE, LATIN SMALL LETTER
|
||||||
0x0103: 'a', // WITH BREVE, LATIN SMALL LETTER
|
0x0103: 'a', // WITH BREVE, LATIN SMALL LETTER
|
||||||
0x01CE: 'a', // WITH CARON, LATIN SMALL LETTER
|
0x01CE: 'a', // WITH CARON, LATIN SMALL LETTER
|
||||||
@@ -405,6 +405,171 @@ var normalized map[rune]rune = map[rune]rune{
|
|||||||
0x024E: 'Y', // WITH STROKE, LATIN CAPITAL LETTER
|
0x024E: 'Y', // WITH STROKE, LATIN CAPITAL LETTER
|
||||||
0x028F: 'Y', // , LATIN LETTER SMALL CAPITAL
|
0x028F: 'Y', // , LATIN LETTER SMALL CAPITAL
|
||||||
0x1D22: 'Z', // , LATIN LETTER SMALL CAPITAL
|
0x1D22: 'Z', // , LATIN LETTER SMALL CAPITAL
|
||||||
|
|
||||||
|
'Ắ': 'A',
|
||||||
|
'Ấ': 'A',
|
||||||
|
'Ằ': 'A',
|
||||||
|
'Ầ': 'A',
|
||||||
|
'Ẳ': 'A',
|
||||||
|
'Ẩ': 'A',
|
||||||
|
'Ẵ': 'A',
|
||||||
|
'Ẫ': 'A',
|
||||||
|
'Ặ': 'A',
|
||||||
|
'Ậ': 'A',
|
||||||
|
|
||||||
|
'ắ': 'a',
|
||||||
|
'ấ': 'a',
|
||||||
|
'ằ': 'a',
|
||||||
|
'ầ': 'a',
|
||||||
|
'ẳ': 'a',
|
||||||
|
'ẩ': 'a',
|
||||||
|
'ẵ': 'a',
|
||||||
|
'ẫ': 'a',
|
||||||
|
'ặ': 'a',
|
||||||
|
'ậ': 'a',
|
||||||
|
|
||||||
|
'Ế': 'E',
|
||||||
|
'Ề': 'E',
|
||||||
|
'Ể': 'E',
|
||||||
|
'Ễ': 'E',
|
||||||
|
'Ệ': 'E',
|
||||||
|
|
||||||
|
'ế': 'e',
|
||||||
|
'ề': 'e',
|
||||||
|
'ể': 'e',
|
||||||
|
'ễ': 'e',
|
||||||
|
'ệ': 'e',
|
||||||
|
|
||||||
|
'Ố': 'O',
|
||||||
|
'Ớ': 'O',
|
||||||
|
'Ồ': 'O',
|
||||||
|
'Ờ': 'O',
|
||||||
|
'Ổ': 'O',
|
||||||
|
'Ở': 'O',
|
||||||
|
'Ỗ': 'O',
|
||||||
|
'Ỡ': 'O',
|
||||||
|
'Ộ': 'O',
|
||||||
|
'Ợ': 'O',
|
||||||
|
|
||||||
|
'ố': 'o',
|
||||||
|
'ớ': 'o',
|
||||||
|
'ồ': 'o',
|
||||||
|
'ờ': 'o',
|
||||||
|
'ổ': 'o',
|
||||||
|
'ở': 'o',
|
||||||
|
'ỗ': 'o',
|
||||||
|
'ỡ': 'o',
|
||||||
|
'ộ': 'o',
|
||||||
|
'ợ': 'o',
|
||||||
|
|
||||||
|
'Ứ': 'U',
|
||||||
|
'Ừ': 'U',
|
||||||
|
'Ử': 'U',
|
||||||
|
'Ữ': 'U',
|
||||||
|
'Ự': 'U',
|
||||||
|
|
||||||
|
'ứ': 'u',
|
||||||
|
'ừ': 'u',
|
||||||
|
'ử': 'u',
|
||||||
|
'ữ': 'u',
|
||||||
|
'ự': 'u',
|
||||||
|
|
||||||
|
// https://en.wikipedia.org/wiki/Halfwidth_and_Fullwidth_Forms_(Unicode_block)
|
||||||
|
0xFF01: '!', // Fullwidth exclamation
|
||||||
|
0xFF02: '"', // Fullwidth quotation mark
|
||||||
|
0xFF03: '#', // Fullwidth number sign
|
||||||
|
0xFF04: '$', // Fullwidth dollar sign
|
||||||
|
0xFF05: '%', // Fullwidth percent
|
||||||
|
0xFF06: '&', // Fullwidth ampersand
|
||||||
|
0xFF07: '\'', // Fullwidth apostrophe
|
||||||
|
0xFF08: '(', // Fullwidth left parenthesis
|
||||||
|
0xFF09: ')', // Fullwidth right parenthesis
|
||||||
|
0xFF0A: '*', // Fullwidth asterisk
|
||||||
|
0xFF0B: '+', // Fullwidth plus
|
||||||
|
0xFF0C: ',', // Fullwidth comma
|
||||||
|
0xFF0D: '-', // Fullwidth hyphen-minus
|
||||||
|
0xFF0E: '.', // Fullwidth period
|
||||||
|
0xFF0F: '/', // Fullwidth slash
|
||||||
|
0xFF10: '0',
|
||||||
|
0xFF11: '1',
|
||||||
|
0xFF12: '2',
|
||||||
|
0xFF13: '3',
|
||||||
|
0xFF14: '4',
|
||||||
|
0xFF15: '5',
|
||||||
|
0xFF16: '6',
|
||||||
|
0xFF17: '7',
|
||||||
|
0xFF18: '8',
|
||||||
|
0xFF19: '9',
|
||||||
|
0xFF1A: ':', // Fullwidth colon
|
||||||
|
0xFF1B: ';', // Fullwidth semicolon
|
||||||
|
0xFF1C: '<', // Fullwidth less-than
|
||||||
|
0xFF1D: '=', // Fullwidth equal
|
||||||
|
0xFF1E: '>', // Fullwidth greater-than
|
||||||
|
0xFF1F: '?', // Fullwidth question mark
|
||||||
|
0xFF20: '@', // Fullwidth at sign
|
||||||
|
0xFF21: 'A',
|
||||||
|
0xFF22: 'B',
|
||||||
|
0xFF23: 'C',
|
||||||
|
0xFF24: 'D',
|
||||||
|
0xFF25: 'E',
|
||||||
|
0xFF26: 'F',
|
||||||
|
0xFF27: 'G',
|
||||||
|
0xFF28: 'H',
|
||||||
|
0xFF29: 'I',
|
||||||
|
0xFF2A: 'J',
|
||||||
|
0xFF2B: 'K',
|
||||||
|
0xFF2C: 'L',
|
||||||
|
0xFF2D: 'M',
|
||||||
|
0xFF2E: 'N',
|
||||||
|
0xFF2F: 'O',
|
||||||
|
0xFF30: 'P',
|
||||||
|
0xFF31: 'Q',
|
||||||
|
0xFF32: 'R',
|
||||||
|
0xFF33: 'S',
|
||||||
|
0xFF34: 'T',
|
||||||
|
0xFF35: 'U',
|
||||||
|
0xFF36: 'V',
|
||||||
|
0xFF37: 'W',
|
||||||
|
0xFF38: 'X',
|
||||||
|
0xFF39: 'Y',
|
||||||
|
0xFF3A: 'Z',
|
||||||
|
0xFF3B: '[', // Fullwidth left bracket
|
||||||
|
0xFF3C: '\\', // Fullwidth backslash
|
||||||
|
0xFF3D: ']', // Fullwidth right bracket
|
||||||
|
0xFF3E: '^', // Fullwidth circumflex
|
||||||
|
0xFF3F: '_', // Fullwidth underscore
|
||||||
|
0xFF40: '`', // Fullwidth grave accent
|
||||||
|
0xFF41: 'a',
|
||||||
|
0xFF42: 'b',
|
||||||
|
0xFF43: 'c',
|
||||||
|
0xFF44: 'd',
|
||||||
|
0xFF45: 'e',
|
||||||
|
0xFF46: 'f',
|
||||||
|
0xFF47: 'g',
|
||||||
|
0xFF48: 'h',
|
||||||
|
0xFF49: 'i',
|
||||||
|
0xFF4A: 'j',
|
||||||
|
0xFF4B: 'k',
|
||||||
|
0xFF4C: 'l',
|
||||||
|
0xFF4D: 'm',
|
||||||
|
0xFF4E: 'n',
|
||||||
|
0xFF4F: 'o',
|
||||||
|
0xFF50: 'p',
|
||||||
|
0xFF51: 'q',
|
||||||
|
0xFF52: 'r',
|
||||||
|
0xFF53: 's',
|
||||||
|
0xFF54: 't',
|
||||||
|
0xFF55: 'u',
|
||||||
|
0xFF56: 'v',
|
||||||
|
0xFF57: 'w',
|
||||||
|
0xFF58: 'x',
|
||||||
|
0xFF59: 'y',
|
||||||
|
0xFF5A: 'z',
|
||||||
|
0xFF5B: '{', // Fullwidth left brace
|
||||||
|
0xFF5C: '|', // Fullwidth vertical bar
|
||||||
|
0xFF5D: '}', // Fullwidth right brace
|
||||||
|
0xFF5E: '~', // Fullwidth tilde
|
||||||
|
0xFF61: '.', // Halfwidth ideographic full stop
|
||||||
 }
 
 // NormalizeRunes normalizes latin script letters
@@ -412,7 +577,7 @@ func NormalizeRunes(runes []rune) []rune {
     ret := make([]rune, len(runes))
     copy(ret, runes)
     for idx, r := range runes {
-        if r < 0x00C0 || r > 0x2184 {
+        if r < 0x00C0 || r > 0xFF61 {
             continue
         }
         n := normalized[r]
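For context, the table above is consumed by NormalizeRunes, whose range check now extends to 0xFF61 so the new halfwidth and fullwidth mappings are reachable. A small usage sketch; it assumes the exported algo.NormalizeRunes function shown in the hunk header and the usual github.com/junegunn/fzf/src/algo import path:

package main

import (
    "fmt"

    "github.com/junegunn/fzf/src/algo"
)

func main() {
    // Accented Latin letters and fullwidth forms are folded to ASCII;
    // runes outside the 0x00C0..0xFF61 window are passed through as-is.
    fmt.Println(string(algo.NormalizeRunes([]rune("Só Danço Samba")))) // So Danco Samba
    fmt.Println(string(algo.NormalizeRunes([]rune("ＦＺＦ"))))         // FZF
}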
src/ansi.go (425 changes)
@@ -1,8 +1,7 @@
|
|||||||
package fzf
|
package fzf
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"fmt"
|
||||||
"regexp"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
@@ -15,120 +14,414 @@ type ansiOffset struct {
|
|||||||
color ansiState
|
color ansiState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type url struct {
|
||||||
|
uri string
|
||||||
|
params string
|
||||||
|
}
|
||||||
|
|
||||||
type ansiState struct {
|
type ansiState struct {
|
||||||
fg tui.Color
|
fg tui.Color
|
||||||
bg tui.Color
|
bg tui.Color
|
||||||
attr tui.Attr
|
attr tui.Attr
|
||||||
|
lbg tui.Color
|
||||||
|
url *url
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ansiState) colored() bool {
|
func (s *ansiState) colored() bool {
|
||||||
return s.fg != -1 || s.bg != -1 || s.attr > 0
|
return s.fg != -1 || s.bg != -1 || s.attr > 0 || s.lbg >= 0 || s.url != nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ansiState) equals(t *ansiState) bool {
|
func (s *ansiState) equals(t *ansiState) bool {
|
||||||
if t == nil {
|
if t == nil {
|
||||||
return !s.colored()
|
return !s.colored()
|
||||||
}
|
}
|
||||||
return s.fg == t.fg && s.bg == t.bg && s.attr == t.attr
|
return s.fg == t.fg && s.bg == t.bg && s.attr == t.attr && s.lbg == t.lbg && s.url == t.url
|
||||||
}
|
}
|
||||||
|
|
||||||
var ansiRegex *regexp.Regexp
|
func (s *ansiState) ToString() string {
|
||||||
|
if !s.colored() {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
func init() {
|
ret := ""
|
||||||
/*
|
if s.attr&tui.Bold > 0 || s.attr&tui.BoldForce > 0 {
|
||||||
References:
|
ret += "1;"
|
||||||
- https://github.com/gnachman/iTerm2
|
}
|
||||||
- http://ascii-table.com/ansi-escape-sequences.php
|
if s.attr&tui.Dim > 0 {
|
||||||
- http://ascii-table.com/ansi-escape-sequences-vt-100.php
|
ret += "2;"
|
||||||
- http://tldp.org/HOWTO/Bash-Prompt-HOWTO/x405.html
|
}
|
||||||
*/
|
if s.attr&tui.Italic > 0 {
|
||||||
// The following regular expression will include not all but most of the
|
ret += "3;"
|
||||||
// frequently used ANSI sequences
|
}
|
||||||
ansiRegex = regexp.MustCompile("\x1b[\\[()][0-9;]*[a-zA-Z@]|\x1b.|[\x08\x0e\x0f]")
|
if s.attr&tui.Underline > 0 {
|
||||||
|
ret += "4;"
|
||||||
|
}
|
||||||
|
if s.attr&tui.Blink > 0 {
|
||||||
|
ret += "5;"
|
||||||
|
}
|
||||||
|
if s.attr&tui.Reverse > 0 {
|
||||||
|
ret += "7;"
|
||||||
|
}
|
||||||
|
if s.attr&tui.StrikeThrough > 0 {
|
||||||
|
ret += "9;"
|
||||||
|
}
|
||||||
|
ret += toAnsiString(s.fg, 30) + toAnsiString(s.bg, 40)
|
||||||
|
|
||||||
|
ret = "\x1b[" + strings.TrimSuffix(ret, ";") + "m"
|
||||||
|
if s.url != nil {
|
||||||
|
ret = fmt.Sprintf("\x1b]8;%s;%s\x1b\\%s\x1b]8;;\x1b", s.url.params, s.url.uri, ret)
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
func toAnsiString(color tui.Color, offset int) string {
|
||||||
|
col := int(color)
|
||||||
|
ret := ""
|
||||||
|
if col == -1 {
|
||||||
|
ret += strconv.Itoa(offset + 9)
|
||||||
|
} else if col < 8 {
|
||||||
|
ret += strconv.Itoa(offset + col)
|
||||||
|
} else if col < 16 {
|
||||||
|
ret += strconv.Itoa(offset - 30 + 90 + col - 8)
|
||||||
|
} else if col < 256 {
|
||||||
|
ret += strconv.Itoa(offset+8) + ";5;" + strconv.Itoa(col)
|
||||||
|
} else if col >= (1 << 24) {
|
||||||
|
r := strconv.Itoa((col >> 16) & 0xff)
|
||||||
|
g := strconv.Itoa((col >> 8) & 0xff)
|
||||||
|
b := strconv.Itoa(col & 0xff)
|
||||||
|
ret += strconv.Itoa(offset+8) + ";2;" + r + ";" + g + ";" + b
|
||||||
|
}
|
||||||
|
return ret + ";"
|
||||||
|
}
|
||||||
|
|
||||||
|
func isPrint(c uint8) bool {
|
||||||
|
return '\x20' <= c && c <= '\x7e'
|
||||||
|
}
|
||||||
|
|
||||||
|
func matchOperatingSystemCommand(s string, start int) int {
|
||||||
|
// `\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)`
|
||||||
|
// ^ match starting here after the first printable character
|
||||||
|
//
|
||||||
|
i := start // prefix matched in nextAnsiEscapeSequence()
|
||||||
|
for ; i < len(s) && isPrint(s[i]); i++ {
|
||||||
|
}
|
||||||
|
if i < len(s) {
|
||||||
|
if s[i] == '\x07' {
|
||||||
|
return i + 1
|
||||||
|
}
|
||||||
|
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
||||||
|
// ------
|
||||||
|
if s[i] == '\x1b' && i < len(s)-1 && s[i+1] == '\\' {
|
||||||
|
return i + 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
||||||
|
// ------------
|
||||||
|
if i < len(s) && s[:i+1] == "\x1b]8;;\x1b" {
|
||||||
|
return i + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
func matchControlSequence(s string) int {
|
||||||
|
// `\x1b[\\[()][0-9;:?]*[a-zA-Z@]`
|
||||||
|
// ^ match starting here
|
||||||
|
//
|
||||||
|
i := 2 // prefix matched in nextAnsiEscapeSequence()
|
||||||
|
for ; i < len(s); i++ {
|
||||||
|
c := s[i]
|
||||||
|
switch c {
|
||||||
|
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ';', ':', '?':
|
||||||
|
// ok
|
||||||
|
default:
|
||||||
|
if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '@' {
|
||||||
|
return i + 1
|
||||||
|
}
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
func isCtrlSeqStart(c uint8) bool {
|
||||||
|
switch c {
|
||||||
|
case '\\', '[', '(', ')':
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// nextAnsiEscapeSequence returns the ANSI escape sequence and is equivalent to
|
||||||
|
// calling FindStringIndex() on the below regex (which was originally used):
|
||||||
|
//
|
||||||
|
// "(?:\x1b[\\[()][0-9;:?]*[a-zA-Z@]|\x1b][0-9]+[;:][[:print:]]+(?:\x1b\\\\|\x07)|\x1b.|[\x0e\x0f]|.\x08|\n)"
|
||||||
|
func nextAnsiEscapeSequence(s string) (int, int) {
|
||||||
|
// fast check for ANSI escape sequences
|
||||||
|
i := 0
|
||||||
|
for ; i < len(s); i++ {
|
||||||
|
switch s[i] {
|
||||||
|
case '\x0e', '\x0f', '\x1b', '\x08', '\n':
|
||||||
|
// We ignore the fact that '\x08' cannot be the first char
|
||||||
|
// in the string and be an escape sequence for the sake of
|
||||||
|
// speed and simplicity.
|
||||||
|
goto Loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return -1, -1
|
||||||
|
|
||||||
|
Loop:
|
||||||
|
for ; i < len(s); i++ {
|
||||||
|
switch s[i] {
|
||||||
|
case '\n':
|
||||||
|
// match: `\n`
|
||||||
|
return i, i + 1
|
||||||
|
case '\x08':
|
||||||
|
// backtrack to match: `.\x08`
|
||||||
|
if i > 0 && s[i-1] != '\n' {
|
||||||
|
if s[i-1] < utf8.RuneSelf {
|
||||||
|
return i - 1, i + 1
|
||||||
|
}
|
||||||
|
_, n := utf8.DecodeLastRuneInString(s[:i])
|
||||||
|
return i - n, i + 1
|
||||||
|
}
|
||||||
|
case '\x1b':
|
||||||
|
// match: `\x1b[\\[()][0-9;:?]*[a-zA-Z@]`
|
||||||
|
if i+2 < len(s) && isCtrlSeqStart(s[i+1]) {
|
||||||
|
if j := matchControlSequence(s[i:]); j != -1 {
|
||||||
|
return i, i + j
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// match: `\x1b][0-9]+[;:][[:print:]]+(?:\x1b\\\\|\x07)`
|
||||||
|
if i+5 < len(s) && s[i+1] == ']' {
|
||||||
|
j := 2
|
||||||
|
// \x1b][0-9]+[;:][[:print:]]+(?:\x1b\\\\|\x07)
|
||||||
|
// ------
|
||||||
|
for ; i+j < len(s) && isNumeric(s[i+j]); j++ {
|
||||||
|
}
|
||||||
|
|
||||||
|
// \x1b][0-9]+[;:][[:print:]]+(?:\x1b\\\\|\x07)
|
||||||
|
// ---------------
|
||||||
|
if j > 2 && i+j+1 < len(s) && (s[i+j] == ';' || s[i+j] == ':') && isPrint(s[i+j+1]) {
|
||||||
|
if k := matchOperatingSystemCommand(s[i:], j+2); k != -1 {
|
||||||
|
return i, i + k
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// match: `\x1b.`
|
||||||
|
if i+1 < len(s) && s[i+1] != '\n' {
|
||||||
|
if s[i+1] < utf8.RuneSelf {
|
||||||
|
return i, i + 2
|
||||||
|
}
|
||||||
|
_, n := utf8.DecodeRuneInString(s[i+1:])
|
||||||
|
return i, i + n + 1
|
||||||
|
}
|
||||||
|
case '\x0e', '\x0f':
|
||||||
|
// match: `[\x0e\x0f]`
|
||||||
|
return i, i + 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return -1, -1
|
||||||
}
|
}
|
||||||
|
|
||||||
func extractColor(str string, state *ansiState, proc func(string, *ansiState) bool) (string, *[]ansiOffset, *ansiState) {
|
func extractColor(str string, state *ansiState, proc func(string, *ansiState) bool) (string, *[]ansiOffset, *ansiState) {
|
||||||
var offsets []ansiOffset
|
// We append to a stack allocated variable that we'll
|
||||||
var output bytes.Buffer
|
// later copy and return, to save on allocations.
|
||||||
|
offsets := make([]ansiOffset, 0, 32)
|
||||||
|
|
||||||
if state != nil {
|
if state != nil {
|
||||||
offsets = append(offsets, ansiOffset{[2]int32{0, 0}, *state})
|
offsets = append(offsets, ansiOffset{[2]int32{0, 0}, *state})
|
||||||
}
|
}
|
||||||
|
|
||||||
idx := 0
|
var (
|
||||||
for _, offset := range ansiRegex.FindAllStringIndex(str, -1) {
|
pstate *ansiState // lazily allocated
|
||||||
prev := str[idx:offset[0]]
|
output strings.Builder
|
||||||
output.WriteString(prev)
|
prevIdx int
|
||||||
|
runeCount int
|
||||||
|
)
|
||||||
|
for idx := 0; idx < len(str); {
|
||||||
|
// Make sure that we found an ANSI code
|
||||||
|
start, end := nextAnsiEscapeSequence(str[idx:])
|
||||||
|
if start == -1 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
start += idx
|
||||||
|
idx += end
|
||||||
|
|
||||||
|
// Check if we should continue
|
||||||
|
prev := str[prevIdx:start]
|
||||||
if proc != nil && !proc(prev, state) {
|
if proc != nil && !proc(prev, state) {
|
||||||
return "", nil, nil
|
return "", nil, nil
|
||||||
}
|
}
|
||||||
newState := interpretCode(str[offset[0]:offset[1]], state)
|
prevIdx = idx
|
||||||
|
|
||||||
if !newState.equals(state) {
|
if len(prev) != 0 {
|
||||||
|
runeCount += utf8.RuneCountInString(prev)
|
||||||
|
// Grow the buffer size to the maximum possible length (string length
|
||||||
|
// containing ansi codes) to avoid repetitive allocation
|
||||||
|
if output.Cap() == 0 {
|
||||||
|
output.Grow(len(str))
|
||||||
|
}
|
||||||
|
output.WriteString(prev)
|
||||||
|
}
|
||||||
|
|
||||||
|
code := str[start:idx]
|
||||||
|
newState := interpretCode(code, state)
|
||||||
|
if code == "\n" || !newState.equals(state) {
|
||||||
if state != nil {
|
if state != nil {
|
||||||
// Update last offset
|
// Update last offset
|
||||||
(&offsets[len(offsets)-1]).offset[1] = int32(utf8.RuneCount(output.Bytes()))
|
(&offsets[len(offsets)-1]).offset[1] = int32(runeCount)
|
||||||
|
}
|
||||||
|
|
||||||
|
if code == "\n" {
|
||||||
|
output.WriteRune('\n')
|
||||||
|
runeCount++
|
||||||
|
// Full-background marker
|
||||||
|
if newState.lbg >= 0 {
|
||||||
|
marker := newState
|
||||||
|
marker.attr |= tui.FullBg
|
||||||
|
offsets = append(offsets, ansiOffset{
|
||||||
|
[2]int32{int32(runeCount), int32(runeCount)},
|
||||||
|
marker,
|
||||||
|
})
|
||||||
|
// Reset the full-line background color
|
||||||
|
newState.lbg = -1
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if newState.colored() {
|
if newState.colored() {
|
||||||
// Append new offset
|
// Append new offset
|
||||||
state = newState
|
if pstate == nil {
|
||||||
newLen := int32(utf8.RuneCount(output.Bytes()))
|
pstate = &ansiState{}
|
||||||
offsets = append(offsets, ansiOffset{[2]int32{newLen, newLen}, *state})
|
}
|
||||||
|
*pstate = newState
|
||||||
|
state = pstate
|
||||||
|
offsets = append(offsets, ansiOffset{
|
||||||
|
[2]int32{int32(runeCount), int32(runeCount)},
|
||||||
|
newState,
|
||||||
|
})
|
||||||
} else {
|
} else {
|
||||||
// Discard state
|
// Discard state
|
||||||
state = nil
|
state = nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
idx = offset[1]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
rest := str[idx:]
|
var rest string
|
||||||
if len(rest) > 0 {
|
var trimmed string
|
||||||
|
if prevIdx == 0 {
|
||||||
|
// No ANSI code found
|
||||||
|
rest = str
|
||||||
|
trimmed = str
|
||||||
|
} else {
|
||||||
|
rest = str[prevIdx:]
|
||||||
output.WriteString(rest)
|
output.WriteString(rest)
|
||||||
if state != nil {
|
trimmed = output.String()
|
||||||
// Update last offset
|
|
||||||
(&offsets[len(offsets)-1]).offset[1] = int32(utf8.RuneCount(output.Bytes()))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if proc != nil {
|
if proc != nil {
|
||||||
proc(rest, state)
|
proc(rest, state)
|
||||||
}
|
}
|
||||||
if len(offsets) == 0 {
|
if len(offsets) > 0 {
|
||||||
return output.String(), nil, state
|
if len(rest) > 0 && state != nil {
|
||||||
|
// Update last offset
|
||||||
|
runeCount += utf8.RuneCountInString(rest)
|
||||||
|
(&offsets[len(offsets)-1]).offset[1] = int32(runeCount)
|
||||||
|
}
|
||||||
|
// Return a copy of the offsets slice
|
||||||
|
a := make([]ansiOffset, len(offsets))
|
||||||
|
copy(a, offsets)
|
||||||
|
return trimmed, &a, state
|
||||||
}
|
}
|
||||||
return output.String(), &offsets, state
|
return trimmed, nil, state
|
||||||
}
|
}
|
||||||
|
|
||||||
func interpretCode(ansiCode string, prevState *ansiState) *ansiState {
|
func parseAnsiCode(s string) (int, string) {
|
||||||
// State
|
var remaining string
|
||||||
var state *ansiState
|
var i int
|
||||||
|
// Faster than strings.IndexAny(";:")
|
||||||
|
i = strings.IndexByte(s, ';')
|
||||||
|
if i < 0 {
|
||||||
|
i = strings.IndexByte(s, ':')
|
||||||
|
}
|
||||||
|
if i >= 0 {
|
||||||
|
remaining = s[i+1:]
|
||||||
|
s = s[:i]
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(s) > 0 {
|
||||||
|
// Inlined version of strconv.Atoi() that only handles positive
|
||||||
|
// integers and does not allocate on error.
|
||||||
|
code := 0
|
||||||
|
for _, ch := range stringBytes(s) {
|
||||||
|
ch -= '0'
|
||||||
|
if ch > 9 {
|
||||||
|
return -1, remaining
|
||||||
|
}
|
||||||
|
code = code*10 + int(ch)
|
||||||
|
}
|
||||||
|
return code, remaining
|
||||||
|
}
|
||||||
|
|
||||||
|
return -1, remaining
|
||||||
|
}
|
||||||
|
|
||||||
|
func interpretCode(ansiCode string, prevState *ansiState) ansiState {
|
||||||
|
if ansiCode == "\n" {
|
||||||
|
if prevState != nil {
|
||||||
|
return *prevState
|
||||||
|
}
|
||||||
|
return ansiState{-1, -1, 0, -1, nil}
|
||||||
|
}
|
||||||
|
|
||||||
|
var state ansiState
|
||||||
if prevState == nil {
|
if prevState == nil {
|
||||||
state = &ansiState{-1, -1, 0}
|
state = ansiState{-1, -1, 0, -1, nil}
|
||||||
} else {
|
} else {
|
||||||
state = &ansiState{prevState.fg, prevState.bg, prevState.attr}
|
state = ansiState{prevState.fg, prevState.bg, prevState.attr, prevState.lbg, prevState.url}
|
||||||
}
|
}
|
||||||
if ansiCode[0] != '\x1b' || ansiCode[1] != '[' || ansiCode[len(ansiCode)-1] != 'm' {
|
if ansiCode[0] != '\x1b' || ansiCode[1] != '[' || ansiCode[len(ansiCode)-1] != 'm' {
|
||||||
|
if prevState != nil && (strings.HasSuffix(ansiCode, "0K") || strings.HasSuffix(ansiCode, "[K")) {
|
||||||
|
state.lbg = prevState.bg
|
||||||
|
} else if strings.HasPrefix(ansiCode, "\x1b]8;") && (strings.HasSuffix(ansiCode, "\x1b\\") || strings.HasSuffix(ansiCode, "\a")) {
|
||||||
|
stLen := 2
|
||||||
|
if strings.HasSuffix(ansiCode, "\a") {
|
||||||
|
stLen = 1
|
||||||
|
}
|
||||||
|
// "\x1b]8;;\x1b\\" or "\x1b]8;;\a"
|
||||||
|
if len(ansiCode) == 5+stLen && ansiCode[4] == ';' {
|
||||||
|
state.url = nil
|
||||||
|
} else if paramsEnd := strings.IndexRune(ansiCode[4:], ';'); paramsEnd >= 0 {
|
||||||
|
params := ansiCode[4 : 4+paramsEnd]
|
||||||
|
uri := ansiCode[5+paramsEnd : len(ansiCode)-stLen]
|
||||||
|
state.url = &url{uri: uri, params: params}
|
||||||
|
}
|
||||||
|
}
|
||||||
return state
|
return state
|
||||||
}
|
}
|
||||||
|
|
||||||
ptr := &state.fg
|
reset := func() {
|
||||||
state256 := 0
|
|
||||||
|
|
||||||
init := func() {
|
|
||||||
state.fg = -1
|
state.fg = -1
|
||||||
state.bg = -1
|
state.bg = -1
|
||||||
state.attr = 0
|
state.attr = 0
|
||||||
state256 = 0
|
|
||||||
}
|
}
|
||||||
|
|
||||||
ansiCode = ansiCode[2 : len(ansiCode)-1]
|
if len(ansiCode) <= 3 {
|
||||||
if len(ansiCode) == 0 {
|
reset()
|
||||||
init()
|
return state
|
||||||
}
|
}
|
||||||
for _, code := range strings.Split(ansiCode, ";") {
|
ansiCode = ansiCode[2 : len(ansiCode)-1]
|
||||||
if num, err := strconv.Atoi(code); err == nil {
|
|
||||||
|
state256 := 0
|
||||||
|
ptr := &state.fg
|
||||||
|
|
||||||
|
count := 0
|
||||||
|
for len(ansiCode) != 0 {
|
||||||
|
var num int
|
||||||
|
if num, ansiCode = parseAnsiCode(ansiCode); num != -1 {
|
||||||
|
count++
|
||||||
switch state256 {
|
switch state256 {
|
||||||
case 0:
|
case 0:
|
||||||
switch num {
|
switch num {
|
||||||
@@ -154,8 +447,24 @@ func interpretCode(ansiCode string, prevState *ansiState) *ansiState {
|
|||||||
state.attr = state.attr | tui.Blink
|
state.attr = state.attr | tui.Blink
|
||||||
case 7:
|
case 7:
|
||||||
state.attr = state.attr | tui.Reverse
|
state.attr = state.attr | tui.Reverse
|
||||||
|
case 9:
|
||||||
|
state.attr = state.attr | tui.StrikeThrough
|
||||||
|
case 22:
|
||||||
|
state.attr = state.attr &^ tui.Bold
|
||||||
|
state.attr = state.attr &^ tui.Dim
|
||||||
|
case 23: // tput rmso
|
||||||
|
state.attr = state.attr &^ tui.Italic
|
||||||
|
case 24: // tput rmul
|
||||||
|
state.attr = state.attr &^ tui.Underline
|
||||||
|
case 25:
|
||||||
|
state.attr = state.attr &^ tui.Blink
|
||||||
|
case 27:
|
||||||
|
state.attr = state.attr &^ tui.Reverse
|
||||||
|
case 29:
|
||||||
|
state.attr = state.attr &^ tui.StrikeThrough
|
||||||
case 0:
|
case 0:
|
||||||
init()
|
reset()
|
||||||
|
state256 = 0
|
||||||
default:
|
default:
|
||||||
if num >= 30 && num <= 37 {
|
if num >= 30 && num <= 37 {
|
||||||
state.fg = tui.Color(num - 30)
|
state.fg = tui.Color(num - 30)
|
||||||
@@ -191,6 +500,12 @@ func interpretCode(ansiCode string, prevState *ansiState) *ansiState {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Empty sequence: reset
|
||||||
|
if count == 0 {
|
||||||
|
reset()
|
||||||
|
}
|
||||||
|
|
||||||
if state256 > 0 {
|
if state256 > 0 {
|
||||||
*ptr = -1
|
*ptr = -1
|
||||||
}
|
}
|
||||||
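The rewritten interpretCode above also recognizes OSC 8 hyperlinks and stores them in ansiState.url (params and URI split on the second ';', cleared by an empty "\x1b]8;;" sequence terminated by ESC-backslash or BEL). A short standalone sketch of the sequence shape being parsed; the URI is only an example value:

package main

import "fmt"

func main() {
    // \x1b]8;params;URI\x1b\\  link text  \x1b]8;;\x1b\\
    // interpretCode() splits the part after "\x1b]8;" on the next ';' into
    // params and uri; the empty "8;;" form clears the stored url again.
    uri := "https://github.com/junegunn/fzf"
    fmt.Printf("\x1b]8;;%s\x1b\\fzf repository\x1b]8;;\x1b\\\n", uri)
}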
|
|||||||
src/ansi_test.go (301 changes)
@@ -1,12 +1,192 @@
|
|||||||
package fzf
|
package fzf
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"math/rand"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
"github.com/junegunn/fzf/src/tui"
|
"github.com/junegunn/fzf/src/tui"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// The following regular expression will include not all but most of the
|
||||||
|
// frequently used ANSI sequences. This regex is used as a reference for
|
||||||
|
// testing nextAnsiEscapeSequence().
|
||||||
|
//
|
||||||
|
// References:
|
||||||
|
// - https://github.com/gnachman/iTerm2
|
||||||
|
// - https://web.archive.org/web/20090204053813/http://ascii-table.com/ansi-escape-sequences.php
|
||||||
|
// (archived from http://ascii-table.com/ansi-escape-sequences.php)
|
||||||
|
// - https://web.archive.org/web/20090227051140/http://ascii-table.com/ansi-escape-sequences-vt-100.php
|
||||||
|
// (archived from http://ascii-table.com/ansi-escape-sequences-vt-100.php)
|
||||||
|
// - http://tldp.org/HOWTO/Bash-Prompt-HOWTO/x405.html
|
||||||
|
// - https://invisible-island.net/xterm/ctlseqs/ctlseqs.html
|
||||||
|
var ansiRegexReference = regexp.MustCompile("(?:\x1b[\\[()][0-9;:]*[a-zA-Z@]|\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)|\x1b.|[\x0e\x0f]|.\x08|\n)")
|
||||||
|
|
||||||
|
func testParserReference(t testing.TB, str string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
toSlice := func(start, end int) []int {
|
||||||
|
if start == -1 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return []int{start, end}
|
||||||
|
}
|
||||||
|
|
||||||
|
s := str
|
||||||
|
for i := 0; ; i++ {
|
||||||
|
got := toSlice(nextAnsiEscapeSequence(s))
|
||||||
|
exp := ansiRegexReference.FindStringIndex(s)
|
||||||
|
|
||||||
|
equal := len(got) == len(exp)
|
||||||
|
if equal {
|
||||||
|
for i := 0; i < len(got); i++ {
|
||||||
|
if got[i] != exp[i] {
|
||||||
|
equal = false
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !equal {
|
||||||
|
var exps, gots []rune
|
||||||
|
if len(got) == 2 {
|
||||||
|
gots = []rune(s[got[0]:got[1]])
|
||||||
|
}
|
||||||
|
if len(exp) == 2 {
|
||||||
|
exps = []rune(s[exp[0]:exp[1]])
|
||||||
|
}
|
||||||
|
t.Errorf("%d: %q: got: %v (%q) want: %v (%q)", i, s, got, gots, exp, exps)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(exp) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
s = s[exp[1]:]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNextAnsiEscapeSequence(t *testing.T) {
|
||||||
|
testStrs := []string{
|
||||||
|
"\x1b[0mhello world",
|
||||||
|
"\x1b[1mhello world",
|
||||||
|
"椙\x1b[1m椙",
|
||||||
|
"椙\x1b[1椙m椙",
|
||||||
|
"\x1b[1mhello \x1b[mw\x1b7o\x1b8r\x1b(Bl\x1b[2@d",
|
||||||
|
"\x1b[1mhello \x1b[Kworld",
|
||||||
|
"hello \x1b[34;45;1mworld",
|
||||||
|
"hello \x1b[34;45;1mwor\x1b[34;45;1mld",
|
||||||
|
"hello \x1b[34;45;1mwor\x1b[0mld",
|
||||||
|
"hello \x1b[34;48;5;233;1mwo\x1b[38;5;161mr\x1b[0ml\x1b[38;5;161md",
|
||||||
|
"hello \x1b[38;5;38;48;5;48;1mwor\x1b[38;5;48;48;5;38ml\x1b[0md",
|
||||||
|
"hello \x1b[32;1mworld",
|
||||||
|
"hello world",
|
||||||
|
"hello \x1b[0;38;5;200;48;5;100mworld",
|
||||||
|
"\x1b椙",
|
||||||
|
"椙\x08",
|
||||||
|
"\n\x08",
|
||||||
|
"X\x08",
|
||||||
|
"",
|
||||||
|
"\x1b]4;3;rgb:aa/bb/cc\x07 ",
|
||||||
|
"\x1b]4;3;rgb:aa/bb/cc\x1b\\ ",
|
||||||
|
ansiBenchmarkString,
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, s := range testStrs {
|
||||||
|
testParserReference(t, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNextAnsiEscapeSequence_Fuzz_Modified(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("short test")
|
||||||
|
}
|
||||||
|
|
||||||
|
testStrs := []string{
|
||||||
|
"\x1b[0mhello world",
|
||||||
|
"\x1b[1mhello world",
|
||||||
|
"椙\x1b[1m椙",
|
||||||
|
"椙\x1b[1椙m椙",
|
||||||
|
"\x1b[1mhello \x1b[mw\x1b7o\x1b8r\x1b(Bl\x1b[2@d",
|
||||||
|
"\x1b[1mhello \x1b[Kworld",
|
||||||
|
"hello \x1b[34;45;1mworld",
|
||||||
|
"hello \x1b[34;45;1mwor\x1b[34;45;1mld",
|
||||||
|
"hello \x1b[34;45;1mwor\x1b[0mld",
|
||||||
|
"hello \x1b[34;48;5;233;1mwo\x1b[38;5;161mr\x1b[0ml\x1b[38;5;161md",
|
||||||
|
"hello \x1b[38;5;38;48;5;48;1mwor\x1b[38;5;48;48;5;38ml\x1b[0md",
|
||||||
|
"hello \x1b[32;1mworld",
|
||||||
|
"hello world",
|
||||||
|
"hello \x1b[0;38;5;200;48;5;100mworld",
|
||||||
|
ansiBenchmarkString,
|
||||||
|
}
|
||||||
|
|
||||||
|
replacementBytes := [...]rune{'\x0e', '\x0f', '\x1b', '\x08'}
|
||||||
|
|
||||||
|
modifyString := func(s string, rr *rand.Rand) string {
|
||||||
|
n := rr.Intn(len(s))
|
||||||
|
b := []rune(s)
|
||||||
|
for ; n >= 0 && len(b) != 0; n-- {
|
||||||
|
i := rr.Intn(len(b))
|
||||||
|
switch x := rr.Intn(4); x {
|
||||||
|
case 0:
|
||||||
|
b = append(b[:i], b[i+1:]...)
|
||||||
|
case 1:
|
||||||
|
j := rr.Intn(len(replacementBytes) - 1)
|
||||||
|
b[i] = replacementBytes[j]
|
||||||
|
case 2:
|
||||||
|
x := rune(rr.Intn(utf8.MaxRune))
|
||||||
|
for !utf8.ValidRune(x) {
|
||||||
|
x = rune(rr.Intn(utf8.MaxRune))
|
||||||
|
}
|
||||||
|
b[i] = x
|
||||||
|
case 3:
|
||||||
|
b[i] = rune(rr.Intn(utf8.MaxRune)) // potentially invalid
|
||||||
|
default:
|
||||||
|
t.Fatalf("unsupported value: %d", x)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return string(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
rr := rand.New(rand.NewSource(1))
|
||||||
|
for _, s := range testStrs {
|
||||||
|
for i := 1_000; i >= 0; i-- {
|
||||||
|
testParserReference(t, modifyString(s, rr))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNextAnsiEscapeSequence_Fuzz_Random(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("short test")
|
||||||
|
}
|
||||||
|
|
||||||
|
randomString := func(rr *rand.Rand) string {
|
||||||
|
numChars := rand.Intn(50)
|
||||||
|
codePoints := make([]rune, numChars)
|
||||||
|
for i := 0; i < len(codePoints); i++ {
|
||||||
|
var r rune
|
||||||
|
for n := 0; n < 1000; n++ {
|
||||||
|
r = rune(rr.Intn(utf8.MaxRune))
|
||||||
|
// Allow 10% of runes to be invalid
|
||||||
|
if utf8.ValidRune(r) || rr.Float64() < 0.10 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
codePoints[i] = r
|
||||||
|
}
|
||||||
|
return string(codePoints)
|
||||||
|
}
|
||||||
|
|
||||||
|
rr := rand.New(rand.NewSource(1))
|
||||||
|
for i := 0; i < 100_000; i++ {
|
||||||
|
testParserReference(t, randomString(rr))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestExtractColor(t *testing.T) {
|
func TestExtractColor(t *testing.T) {
|
||||||
assert := func(offset ansiOffset, b int32, e int32, fg tui.Color, bg tui.Color, bold bool) {
|
assert := func(offset ansiOffset, b int32, e int32, fg tui.Color, bg tui.Color, bold bool) {
|
||||||
var attr tui.Attr
|
var attr tui.Attr
|
||||||
@@ -26,9 +206,9 @@ func TestExtractColor(t *testing.T) {
|
|||||||
output, ansiOffsets, newState := extractColor(src, state, nil)
|
output, ansiOffsets, newState := extractColor(src, state, nil)
|
||||||
state = newState
|
state = newState
|
||||||
if output != "hello world" {
|
if output != "hello world" {
|
||||||
t.Errorf("Invalid output: %s %s", output, []rune(output))
|
t.Errorf("Invalid output: %s %v", output, []rune(output))
|
||||||
}
|
}
|
||||||
fmt.Println(src, ansiOffsets, clean)
|
t.Log(src, ansiOffsets, clean)
|
||||||
assertion(ansiOffsets, state)
|
assertion(ansiOffsets, state)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -155,4 +335,119 @@ func TestExtractColor(t *testing.T) {
|
|||||||
assert((*offsets)[0], 0, 6, 2, -1, true)
|
assert((*offsets)[0], 0, 6, 2, -1, true)
|
||||||
assert((*offsets)[1], 6, 11, 200, 100, false)
|
assert((*offsets)[1], 6, 11, 200, 100, false)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
state = nil
|
||||||
|
var color24 tui.Color = (1 << 24) + (180 << 16) + (190 << 8) + 254
|
||||||
|
src = "\x1b[1mhello \x1b[22;1;38:2:180:190:254mworld"
|
||||||
|
check(func(offsets *[]ansiOffset, state *ansiState) {
|
||||||
|
if len(*offsets) != 2 {
|
||||||
|
t.Fail()
|
||||||
|
}
|
||||||
|
if state.fg != color24 || state.attr != 1 {
|
||||||
|
t.Fail()
|
||||||
|
}
|
||||||
|
assert((*offsets)[0], 0, 6, -1, -1, true)
|
||||||
|
assert((*offsets)[1], 6, 11, color24, -1, true)
|
||||||
|
})
|
||||||
|
|
||||||
|
src = "\x1b]133;A\x1b\\hello \x1b]133;C\x1b\\world"
|
||||||
|
check(func(offsets *[]ansiOffset, state *ansiState) {
|
||||||
|
if len(*offsets) != 1 {
|
||||||
|
t.Fail()
|
||||||
|
}
|
||||||
|
assert((*offsets)[0], 0, 11, color24, -1, true)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAnsiCodeStringConversion(t *testing.T) {
|
||||||
|
assert := func(code string, prevState *ansiState, expected string) {
|
||||||
|
state := interpretCode(code, prevState)
|
||||||
|
if expected != state.ToString() {
|
||||||
|
t.Errorf("expected: %s, actual: %s",
|
||||||
|
strings.ReplaceAll(expected, "\x1b[", "\\x1b["),
|
||||||
|
strings.ReplaceAll(state.ToString(), "\x1b[", "\\x1b["))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert("\x1b[m", nil, "")
|
||||||
|
assert("\x1b[m", &ansiState{attr: tui.Blink, lbg: -1}, "")
|
||||||
|
assert("\x1b[0m", &ansiState{fg: 4, bg: 4, lbg: -1}, "")
|
||||||
|
assert("\x1b[;m", &ansiState{fg: 4, bg: 4, lbg: -1}, "")
|
||||||
|
assert("\x1b[;;m", &ansiState{fg: 4, bg: 4, lbg: -1}, "")
|
||||||
|
|
||||||
|
assert("\x1b[31m", nil, "\x1b[31;49m")
|
||||||
|
assert("\x1b[41m", nil, "\x1b[39;41m")
|
||||||
|
|
||||||
|
assert("\x1b[92m", nil, "\x1b[92;49m")
|
||||||
|
assert("\x1b[102m", nil, "\x1b[39;102m")
|
||||||
|
|
||||||
|
assert("\x1b[31m", &ansiState{fg: 4, bg: 4, lbg: -1}, "\x1b[31;44m")
|
||||||
|
assert("\x1b[1;2;31m", &ansiState{fg: 2, bg: -1, attr: tui.Reverse, lbg: -1}, "\x1b[1;2;7;31;49m")
|
||||||
|
assert("\x1b[38;5;100;48;5;200m", nil, "\x1b[38;5;100;48;5;200m")
|
||||||
|
assert("\x1b[38:5:100:48:5:200m", nil, "\x1b[38;5;100;48;5;200m")
|
||||||
|
assert("\x1b[48;5;100;38;5;200m", nil, "\x1b[38;5;200;48;5;100m")
|
||||||
|
assert("\x1b[48;5;100;38;2;10;20;30;1m", nil, "\x1b[1;38;2;10;20;30;48;5;100m")
|
||||||
|
assert("\x1b[48;5;100;38;2;10;20;30;7m",
|
||||||
|
&ansiState{attr: tui.Dim | tui.Italic, fg: 1, bg: 1},
|
||||||
|
"\x1b[2;3;7;38;2;10;20;30;48;5;100m")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestParseAnsiCode(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
In, Exp string
|
||||||
|
N int
|
||||||
|
}{
|
||||||
|
{"123", "", 123},
|
||||||
|
{"1a", "", -1},
|
||||||
|
{"1a;12", "12", -1},
|
||||||
|
{"12;a", "a", 12},
|
||||||
|
{"-2", "", -1},
|
||||||
|
}
|
||||||
|
for _, x := range tests {
|
||||||
|
n, s := parseAnsiCode(x.In)
|
||||||
|
if n != x.N || s != x.Exp {
|
||||||
|
t.Fatalf("%q: got: (%d %q) want: (%d %q)", x.In, n, s, x.N, x.Exp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// kernel/bpf/preload/iterators/README
|
||||||
|
const ansiBenchmarkString = "\x1b[38;5;81m\x1b[01;31m\x1b[Kkernel/\x1b[0m\x1b[38:5:81mbpf/" +
|
||||||
|
"\x1b[0m\x1b[38:5:81mpreload/\x1b[0m\x1b[38;5;81miterators/" +
|
||||||
|
"\x1b[0m\x1b[38:5:149mMakefile\x1b[m\x1b[K\x1b[0m"
|
||||||
|
|
||||||
|
func BenchmarkNextAnsiEscapeSequence(b *testing.B) {
|
||||||
|
b.SetBytes(int64(len(ansiBenchmarkString)))
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
s := ansiBenchmarkString
|
||||||
|
for {
|
||||||
|
_, o := nextAnsiEscapeSequence(s)
|
||||||
|
if o == -1 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
s = s[o:]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Baseline test to compare the speed of nextAnsiEscapeSequence() to the
|
||||||
|
// previously used regex based implementation.
|
||||||
|
func BenchmarkNextAnsiEscapeSequence_Regex(b *testing.B) {
|
||||||
|
b.SetBytes(int64(len(ansiBenchmarkString)))
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
s := ansiBenchmarkString
|
||||||
|
for {
|
||||||
|
a := ansiRegexReference.FindStringIndex(s)
|
||||||
|
if len(a) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
s = s[a[1]:]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkExtractColor(b *testing.B) {
|
||||||
|
b.SetBytes(int64(len(ansiBenchmarkString)))
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
extractColor(ansiBenchmarkString, nil, nil)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
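testParserReference above validates the hand-written scanner by comparing its reported spans against a reference regexp over both curated and fuzzed inputs. The following standalone sketch illustrates that cross-checking idea with a deliberately tiny scanner and a much simpler regex; neither is fzf's actual implementation:

package main

import (
    "fmt"
    "regexp"
    "strings"
)

// scan is a toy stand-in for a hand-written scanner: it finds "\x1b[" and
// the next 'm' terminator, returning the span or nil if nothing matches.
func scan(s string) []int {
    start := strings.Index(s, "\x1b[")
    if start == -1 {
        return nil
    }
    end := strings.IndexByte(s[start:], 'm')
    if end == -1 {
        return nil
    }
    return []int{start, start + end + 1}
}

func main() {
    reference := regexp.MustCompile("\x1b\\[[0-9;]*m")
    s := "hello \x1b[31mworld\x1b[0m!"
    for s != "" {
        got := scan(s)
        want := reference.FindStringIndex(s)
        fmt.Println(got, want) // the two should agree at every step
        if want == nil {
            break
        }
        s = s[want[1]:]
    }
}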
|
|||||||
src/cache.go (60 changes)
@@ -3,7 +3,7 @@ package fzf
|
|||||||
import "sync"
|
import "sync"
|
||||||
|
|
||||||
// queryCache associates strings to lists of items
|
// queryCache associates strings to lists of items
|
||||||
type queryCache map[string][]*Result
|
type queryCache map[string][]Result
|
||||||
|
|
||||||
// ChunkCache associates Chunk and query string to lists of items
|
// ChunkCache associates Chunk and query string to lists of items
|
||||||
type ChunkCache struct {
|
type ChunkCache struct {
|
||||||
@@ -12,12 +12,26 @@ type ChunkCache struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// NewChunkCache returns a new ChunkCache
|
// NewChunkCache returns a new ChunkCache
|
||||||
func NewChunkCache() ChunkCache {
|
func NewChunkCache() *ChunkCache {
|
||||||
return ChunkCache{sync.Mutex{}, make(map[*Chunk]*queryCache)}
|
return &ChunkCache{sync.Mutex{}, make(map[*Chunk]*queryCache)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cc *ChunkCache) Clear() {
|
||||||
|
cc.mutex.Lock()
|
||||||
|
cc.cache = make(map[*Chunk]*queryCache)
|
||||||
|
cc.mutex.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cc *ChunkCache) retire(chunk ...*Chunk) {
|
||||||
|
cc.mutex.Lock()
|
||||||
|
for _, c := range chunk {
|
||||||
|
delete(cc.cache, c)
|
||||||
|
}
|
||||||
|
cc.mutex.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add adds the list to the cache
|
// Add adds the list to the cache
|
||||||
func (cc *ChunkCache) Add(chunk *Chunk, key string, list []*Result) {
|
func (cc *ChunkCache) Add(chunk *Chunk, key string, list []Result) {
|
||||||
if len(key) == 0 || !chunk.IsFull() || len(list) > queryCacheMax {
|
if len(key) == 0 || !chunk.IsFull() || len(list) > queryCacheMax {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -33,10 +47,10 @@ func (cc *ChunkCache) Add(chunk *Chunk, key string, list []*Result) {
|
|||||||
(*qc)[key] = list
|
(*qc)[key] = list
|
||||||
}
|
}
|
||||||
|
|
||||||
// Find is called to lookup ChunkCache
|
// Lookup is called to lookup ChunkCache
|
||||||
func (cc *ChunkCache) Find(chunk *Chunk, key string) ([]*Result, bool) {
|
func (cc *ChunkCache) Lookup(chunk *Chunk, key string) []Result {
|
||||||
if len(key) == 0 || !chunk.IsFull() {
|
if len(key) == 0 || !chunk.IsFull() {
|
||||||
return nil, false
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
cc.mutex.Lock()
|
cc.mutex.Lock()
|
||||||
@@ -46,8 +60,36 @@ func (cc *ChunkCache) Find(chunk *Chunk, key string) ([]*Result, bool) {
|
|||||||
if ok {
|
if ok {
|
||||||
list, ok := (*qc)[key]
|
list, ok := (*qc)[key]
|
||||||
if ok {
|
if ok {
|
||||||
return list, true
|
return list
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil, false
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cc *ChunkCache) Search(chunk *Chunk, key string) []Result {
|
||||||
|
if len(key) == 0 || !chunk.IsFull() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
cc.mutex.Lock()
|
||||||
|
defer cc.mutex.Unlock()
|
||||||
|
|
||||||
|
qc, ok := cc.cache[chunk]
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := 1; idx < len(key); idx++ {
|
||||||
|
// [---------| ] | [ |---------]
|
||||||
|
// [--------| ] | [ |--------]
|
||||||
|
// [-------| ] | [ |-------]
|
||||||
|
prefix := key[:len(key)-idx]
|
||||||
|
suffix := key[idx:]
|
||||||
|
for _, substr := range [2]string{prefix, suffix} {
|
||||||
|
if cached, found := (*qc)[substr]; found {
|
||||||
|
return cached
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,37 +4,36 @@ import "testing"
|
|||||||
|
|
||||||
func TestChunkCache(t *testing.T) {
|
func TestChunkCache(t *testing.T) {
|
||||||
cache := NewChunkCache()
|
cache := NewChunkCache()
|
||||||
chunk2 := make(Chunk, chunkSize)
|
|
||||||
chunk1p := &Chunk{}
|
chunk1p := &Chunk{}
|
||||||
chunk2p := &chunk2
|
chunk2p := &Chunk{count: chunkSize}
|
||||||
items1 := []*Result{&Result{}}
|
items1 := []Result{{}}
|
||||||
items2 := []*Result{&Result{}, &Result{}}
|
items2 := []Result{{}, {}}
|
||||||
cache.Add(chunk1p, "foo", items1)
|
cache.Add(chunk1p, "foo", items1)
|
||||||
cache.Add(chunk2p, "foo", items1)
|
cache.Add(chunk2p, "foo", items1)
|
||||||
cache.Add(chunk2p, "bar", items2)
|
cache.Add(chunk2p, "bar", items2)
|
||||||
|
|
||||||
{ // chunk1 is not full
|
{ // chunk1 is not full
|
||||||
cached, found := cache.Find(chunk1p, "foo")
|
cached := cache.Lookup(chunk1p, "foo")
|
||||||
if found {
|
if cached != nil {
|
||||||
t.Error("Cached disabled for non-empty chunks", found, cached)
|
t.Error("Cached disabled for non-empty chunks", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
cached, found := cache.Find(chunk2p, "foo")
|
cached := cache.Lookup(chunk2p, "foo")
|
||||||
if !found || len(cached) != 1 {
|
if cached == nil || len(cached) != 1 {
|
||||||
t.Error("Expected 1 item cached", found, cached)
|
t.Error("Expected 1 item cached", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
cached, found := cache.Find(chunk2p, "bar")
|
cached := cache.Lookup(chunk2p, "bar")
|
||||||
if !found || len(cached) != 2 {
|
if cached == nil || len(cached) != 2 {
|
||||||
t.Error("Expected 2 items cached", found, cached)
|
t.Error("Expected 2 items cached", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
cached, found := cache.Find(chunk1p, "foobar")
|
cached := cache.Lookup(chunk1p, "foobar")
|
||||||
if found {
|
if cached != nil {
|
||||||
t.Error("Expected 0 item cached", found, cached)
|
t.Error("Expected 0 item cached", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
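The cache now distinguishes Lookup (an exact hit for this chunk and query) from Search (results cached under a shorter prefix or suffix of the query, usable as a superset to re-filter). A minimal in-package sketch of how a caller might chain them; cachedOrPartial is a hypothetical helper name, not part of the diff:

package fzf

// cachedOrPartial is an illustrative wrapper around the Lookup/Search split
// introduced above; it assumes it compiles inside package fzf next to cache.go.
func cachedOrPartial(cache *ChunkCache, chunk *Chunk, query string) []Result {
    // Exact hit: this chunk was already matched against the same query.
    if cached := cache.Lookup(chunk, query); cached != nil {
        return cached
    }
    // Fall back to results cached for a prefix or suffix of the query;
    // they form a superset that the matcher can narrow down cheaply.
    return cache.Search(chunk, query)
}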
|
|||||||
src/chunklist.go (118 changes)
@@ -2,34 +2,35 @@ package fzf
|
|||||||
|
|
||||||
import "sync"
|
import "sync"
|
||||||
|
|
||||||
// Chunk is a list of Item pointers whose size has the upper limit of chunkSize
|
// Chunk is a list of Items whose size has the upper limit of chunkSize
|
||||||
type Chunk []*Item // >>> []Item
|
type Chunk struct {
|
||||||
|
items [chunkSize]Item
|
||||||
|
count int
|
||||||
|
}
|
||||||
|
|
||||||
// ItemBuilder is a closure type that builds Item object from a pointer to a
|
// ItemBuilder is a closure type that builds Item object from byte array
|
||||||
// string and an integer
|
type ItemBuilder func(*Item, []byte) bool
|
||||||
type ItemBuilder func([]byte, int) *Item
|
|
||||||
|
|
||||||
// ChunkList is a list of Chunks
|
// ChunkList is a list of Chunks
|
||||||
type ChunkList struct {
|
type ChunkList struct {
|
||||||
chunks []*Chunk
|
chunks []*Chunk
|
||||||
count int
|
|
||||||
mutex sync.Mutex
|
mutex sync.Mutex
|
||||||
trans ItemBuilder
|
trans ItemBuilder
|
||||||
|
cache *ChunkCache
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewChunkList returns a new ChunkList
|
// NewChunkList returns a new ChunkList
|
||||||
func NewChunkList(trans ItemBuilder) *ChunkList {
|
func NewChunkList(cache *ChunkCache, trans ItemBuilder) *ChunkList {
|
||||||
return &ChunkList{
|
return &ChunkList{
|
||||||
chunks: []*Chunk{},
|
chunks: []*Chunk{},
|
||||||
count: 0,
|
|
||||||
mutex: sync.Mutex{},
|
mutex: sync.Mutex{},
|
||||||
trans: trans}
|
trans: trans,
|
||||||
|
cache: cache}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Chunk) push(trans ItemBuilder, data []byte, index int) bool {
|
func (c *Chunk) push(trans ItemBuilder, data []byte) bool {
|
||||||
item := trans(data, index)
|
if trans(&c.items[c.count], data) {
|
||||||
if item != nil {
|
c.count++
|
||||||
*c = append(*c, item)
|
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
@@ -37,7 +38,14 @@ func (c *Chunk) push(trans ItemBuilder, data []byte, index int) bool {
|
|||||||
|
|
||||||
// IsFull returns true if the Chunk is full
|
// IsFull returns true if the Chunk is full
|
||||||
func (c *Chunk) IsFull() bool {
|
func (c *Chunk) IsFull() bool {
|
||||||
return len(*c) == chunkSize
|
return c.count == chunkSize
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Chunk) lastIndex(minValue int32) int32 {
|
||||||
|
if c.count == 0 {
|
||||||
|
return minValue
|
||||||
|
}
|
||||||
|
return c.items[c.count-1].Index() + 1 // Exclusive
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cl *ChunkList) lastChunk() *Chunk {
|
func (cl *ChunkList) lastChunk() *Chunk {
|
||||||
@@ -49,45 +57,85 @@ func CountItems(cs []*Chunk) int {
|
|||||||
if len(cs) == 0 {
|
if len(cs) == 0 {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
return chunkSize*(len(cs)-1) + len(*(cs[len(cs)-1]))
|
if len(cs) == 1 {
|
||||||
|
return cs[0].count
|
||||||
|
}
|
||||||
|
|
||||||
|
// First chunk might not be full due to --tail=N
|
||||||
|
return cs[0].count + chunkSize*(len(cs)-2) + cs[len(cs)-1].count
|
||||||
}
|
}
|
||||||
|
|
||||||
// Push adds the item to the list
|
// Push adds the item to the list
|
||||||
func (cl *ChunkList) Push(data []byte) bool {
|
func (cl *ChunkList) Push(data []byte) bool {
|
||||||
cl.mutex.Lock()
|
cl.mutex.Lock()
|
||||||
defer cl.mutex.Unlock()
|
|
||||||
|
|
||||||
if len(cl.chunks) == 0 || cl.lastChunk().IsFull() {
|
if len(cl.chunks) == 0 || cl.lastChunk().IsFull() {
|
||||||
newChunk := Chunk(make([]*Item, 0, chunkSize))
|
cl.chunks = append(cl.chunks, &Chunk{})
|
||||||
cl.chunks = append(cl.chunks, &newChunk)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if cl.lastChunk().push(cl.trans, data, cl.count) {
|
ret := cl.lastChunk().push(cl.trans, data)
|
||||||
cl.count++
|
cl.mutex.Unlock()
|
||||||
return true
|
return ret
|
||||||
}
|
}
|
||||||
return false
|
|
||||||
|
// Clear clears the data
|
||||||
|
func (cl *ChunkList) Clear() {
|
||||||
|
cl.mutex.Lock()
|
||||||
|
cl.chunks = nil
|
||||||
|
cl.mutex.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Snapshot returns immutable snapshot of the ChunkList
|
// Snapshot returns immutable snapshot of the ChunkList
|
||||||
func (cl *ChunkList) Snapshot() ([]*Chunk, int) {
|
func (cl *ChunkList) Snapshot(tail int) ([]*Chunk, int, bool) {
|
||||||
cl.mutex.Lock()
|
cl.mutex.Lock()
|
||||||
defer cl.mutex.Unlock()
|
|
||||||
|
changed := false
|
||||||
|
if tail > 0 && CountItems(cl.chunks) > tail {
|
||||||
|
changed = true
|
||||||
|
// Find the number of chunks to keep
|
||||||
|
numChunks := 0
|
||||||
|
for left, i := tail, len(cl.chunks)-1; left > 0 && i >= 0; i-- {
|
||||||
|
numChunks++
|
||||||
|
left -= cl.chunks[i].count
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy the chunks to keep
|
||||||
|
ret := make([]*Chunk, numChunks)
|
||||||
|
minIndex := len(cl.chunks) - numChunks
|
||||||
|
cl.cache.retire(cl.chunks[:minIndex]...)
|
||||||
|
copy(ret, cl.chunks[minIndex:])
|
||||||
|
|
||||||
|
for left, i := tail, len(ret)-1; i >= 0; i-- {
|
||||||
|
chunk := ret[i]
|
||||||
|
if chunk.count > left {
|
||||||
|
newChunk := *chunk
|
||||||
|
newChunk.count = left
|
||||||
|
oldCount := chunk.count
|
||||||
|
for i := 0; i < left; i++ {
|
||||||
|
newChunk.items[i] = chunk.items[oldCount-left+i]
|
||||||
|
}
|
||||||
|
ret[i] = &newChunk
|
||||||
|
cl.cache.retire(chunk)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
left -= chunk.count
|
||||||
|
}
|
||||||
|
cl.chunks = ret
|
||||||
|
}
|
||||||
|
|
||||||
ret := make([]*Chunk, len(cl.chunks))
|
ret := make([]*Chunk, len(cl.chunks))
|
||||||
copy(ret, cl.chunks)
|
copy(ret, cl.chunks)
|
||||||
|
|
||||||
// Duplicate the last chunk
|
// Duplicate the first and the last chunk
|
||||||
if cnt := len(ret); cnt > 0 {
|
if cnt := len(ret); cnt > 0 {
|
||||||
ret[cnt-1] = ret[cnt-1].dupe()
|
if tail > 0 && cnt > 1 {
|
||||||
|
newChunk := *ret[0]
|
||||||
|
ret[0] = &newChunk
|
||||||
|
}
|
||||||
|
newChunk := *ret[cnt-1]
|
||||||
|
ret[cnt-1] = &newChunk
|
||||||
}
|
}
|
||||||
return ret, cl.count
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Chunk) dupe() *Chunk {
|
cl.mutex.Unlock()
|
||||||
newChunk := make(Chunk, len(*c))
|
return ret, CountItems(ret), changed
|
||||||
for idx, ptr := range *c {
|
|
||||||
newChunk[idx] = ptr
|
|
||||||
}
|
|
||||||
return &newChunk
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,12 +11,13 @@ func TestChunkList(t *testing.T) {
|
|||||||
// FIXME global
|
// FIXME global
|
||||||
sortCriteria = []criterion{byScore, byLength}
|
sortCriteria = []criterion{byScore, byLength}
|
||||||
|
|
||||||
cl := NewChunkList(func(s []byte, i int) *Item {
|
cl := NewChunkList(NewChunkCache(), func(item *Item, s []byte) bool {
|
||||||
return &Item{text: util.ToChars(s), index: int32(i * 2)}
|
item.text = util.ToChars(s)
|
||||||
|
return true
|
||||||
})
|
})
|
||||||
|
|
||||||
// Snapshot
|
// Snapshot
|
||||||
snapshot, count := cl.Snapshot()
|
snapshot, count, _ := cl.Snapshot(0)
|
||||||
if len(snapshot) > 0 || count > 0 {
|
if len(snapshot) > 0 || count > 0 {
|
||||||
t.Error("Snapshot should be empty now")
|
t.Error("Snapshot should be empty now")
|
||||||
}
|
}
|
||||||
@@ -31,18 +32,18 @@ func TestChunkList(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// But the new snapshot should contain the added items
|
// But the new snapshot should contain the added items
|
||||||
snapshot, count = cl.Snapshot()
|
snapshot, count, _ = cl.Snapshot(0)
|
||||||
if len(snapshot) != 1 && count != 2 {
|
if len(snapshot) != 1 && count != 2 {
|
||||||
t.Error("Snapshot should not be empty now")
|
t.Error("Snapshot should not be empty now")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check the content of the ChunkList
|
// Check the content of the ChunkList
|
||||||
chunk1 := snapshot[0]
|
chunk1 := snapshot[0]
|
||||||
if len(*chunk1) != 2 {
|
if chunk1.count != 2 {
|
||||||
t.Error("Snapshot should contain only two items")
|
t.Error("Snapshot should contain only two items")
|
||||||
}
|
}
|
||||||
if (*chunk1)[0].text.ToString() != "hello" || (*chunk1)[0].index != 0 ||
|
if chunk1.items[0].text.ToString() != "hello" ||
|
||||||
(*chunk1)[1].text.ToString() != "world" || (*chunk1)[1].index != 2 {
|
chunk1.items[1].text.ToString() != "world" {
|
||||||
t.Error("Invalid data")
|
t.Error("Invalid data")
|
||||||
}
|
}
|
||||||
if chunk1.IsFull() {
|
if chunk1.IsFull() {
|
||||||
@@ -51,7 +52,7 @@ func TestChunkList(t *testing.T) {
|
|||||||
|
|
||||||
// Add more data
|
// Add more data
|
||||||
for i := 0; i < chunkSize*2; i++ {
|
for i := 0; i < chunkSize*2; i++ {
|
||||||
cl.Push([]byte(fmt.Sprintf("item %d", i)))
|
cl.Push(fmt.Appendf(nil, "item %d", i))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Previous snapshot should remain the same
|
// Previous snapshot should remain the same
|
||||||
@@ -60,20 +61,56 @@ func TestChunkList(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// New snapshot
|
// New snapshot
|
||||||
snapshot, count = cl.Snapshot()
|
snapshot, count, _ = cl.Snapshot(0)
|
||||||
if len(snapshot) != 3 || !snapshot[0].IsFull() ||
|
if len(snapshot) != 3 || !snapshot[0].IsFull() ||
|
||||||
!snapshot[1].IsFull() || snapshot[2].IsFull() || count != chunkSize*2+2 {
|
!snapshot[1].IsFull() || snapshot[2].IsFull() || count != chunkSize*2+2 {
|
||||||
t.Error("Expected two full chunks and one more chunk")
|
t.Error("Expected two full chunks and one more chunk")
|
||||||
}
|
}
|
||||||
if len(*snapshot[2]) != 2 {
|
if snapshot[2].count != 2 {
|
||||||
t.Error("Unexpected number of items")
|
t.Error("Unexpected number of items")
|
||||||
}
|
}
|
||||||
|
|
||||||
cl.Push([]byte("hello"))
|
cl.Push([]byte("hello"))
|
||||||
cl.Push([]byte("world"))
|
cl.Push([]byte("world"))
|
||||||
|
|
||||||
lastChunkCount := len(*snapshot[len(snapshot)-1])
|
lastChunkCount := snapshot[len(snapshot)-1].count
|
||||||
if lastChunkCount != 2 {
|
if lastChunkCount != 2 {
|
||||||
t.Error("Unexpected number of items:", lastChunkCount)
|
t.Error("Unexpected number of items:", lastChunkCount)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestChunkListTail(t *testing.T) {
|
||||||
|
cl := NewChunkList(NewChunkCache(), func(item *Item, s []byte) bool {
|
||||||
|
item.text = util.ToChars(s)
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
total := chunkSize*2 + chunkSize/2
|
||||||
|
for i := 0; i < total; i++ {
|
||||||
|
cl.Push(fmt.Appendf(nil, "item %d", i))
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshot, count, changed := cl.Snapshot(0)
|
||||||
|
assertCount := func(expected int, shouldChange bool) {
|
||||||
|
if count != expected || CountItems(snapshot) != expected {
|
||||||
|
t.Errorf("Unexpected count: %d (expected: %d)", count, expected)
|
||||||
|
}
|
||||||
|
if changed != shouldChange {
|
||||||
|
t.Error("Unexpected change status")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assertCount(total, false)
|
||||||
|
|
||||||
|
tail := chunkSize + chunkSize/2
|
||||||
|
snapshot, count, changed = cl.Snapshot(tail)
|
||||||
|
assertCount(tail, true)
|
||||||
|
|
||||||
|
snapshot, count, changed = cl.Snapshot(tail)
|
||||||
|
assertCount(tail, false)
|
||||||
|
|
||||||
|
snapshot, count, changed = cl.Snapshot(0)
|
||||||
|
assertCount(tail, false)
|
||||||
|
|
||||||
|
tail = chunkSize / 2
|
||||||
|
snapshot, count, changed = cl.Snapshot(tail)
|
||||||
|
assertCount(tail, true)
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,27 +1,37 @@
|
|||||||
package fzf
|
package fzf
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"math"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/junegunn/fzf/src/util"
|
"github.com/junegunn/fzf/src/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
// Current version
|
|
||||||
version = "0.16.3"
|
|
||||||
|
|
||||||
// Core
|
// Core
|
||||||
coordinatorDelayMax time.Duration = 100 * time.Millisecond
|
coordinatorDelayMax time.Duration = 100 * time.Millisecond
|
||||||
coordinatorDelayStep time.Duration = 10 * time.Millisecond
|
coordinatorDelayStep time.Duration = 10 * time.Millisecond
|
||||||
|
|
||||||
// Reader
|
// Reader
|
||||||
readerBufferSize = 64 * 1024
|
readerBufferSize = 64 * 1024
|
||||||
|
readerSlabSize = 128 * 1024
|
||||||
|
readerPollIntervalMin = 10 * time.Millisecond
|
||||||
|
readerPollIntervalStep = 5 * time.Millisecond
|
||||||
|
readerPollIntervalMax = 50 * time.Millisecond
|
||||||
|
|
||||||
// Terminal
|
// Terminal
|
||||||
initialDelay = 20 * time.Millisecond
|
initialDelay = 20 * time.Millisecond
|
||||||
initialDelayTac = 100 * time.Millisecond
|
initialDelayTac = 100 * time.Millisecond
|
||||||
spinnerDuration = 200 * time.Millisecond
|
spinnerDuration = 100 * time.Millisecond
|
||||||
maxPatternLength = 100
|
previewCancelWait = 500 * time.Millisecond
|
||||||
|
previewChunkDelay = 100 * time.Millisecond
|
||||||
|
previewDelayed = 500 * time.Millisecond
|
||||||
|
maxPatternLength = 1000
|
||||||
|
maxMulti = math.MaxInt32
|
||||||
|
|
||||||
|
// Background processes
|
||||||
|
maxBgProcesses = 30
|
||||||
|
maxBgProcessesPerAction = 3
|
||||||
|
|
||||||
// Matcher
|
// Matcher
|
||||||
numPartitionsMultiplier = 8
|
numPartitionsMultiplier = 8
|
||||||
@@ -56,12 +66,14 @@ const (
|
|||||||
EvtSearchProgress
|
EvtSearchProgress
|
||||||
EvtSearchFin
|
EvtSearchFin
|
||||||
EvtHeader
|
EvtHeader
|
||||||
EvtClose
|
EvtReady
|
||||||
|
EvtQuit
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
exitOk = 0
|
ExitOk = 0
|
||||||
exitNoMatch = 1
|
ExitNoMatch = 1
|
||||||
exitError = 2
|
ExitError = 2
|
||||||
exitInterrupt = 130
|
ExitBecome = 126
|
||||||
|
ExitInterrupt = 130
|
||||||
)
|
)
|
||||||
|
|||||||
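With the exit codes now exported and Run returning (int, error) in src/core.go below, fzf can be driven as a library. A hedged sketch of such a caller; the ParseOptions name and signature are an assumption, as only the constants and Run's signature appear in this diff:

package main

import (
    "fmt"
    "os"

    fzf "github.com/junegunn/fzf/src"
)

func main() {
    // ParseOptions(useDefaults, args) is assumed here; only the exported
    // exit constants and Run's (int, error) signature come from the diff.
    opts, err := fzf.ParseOptions(true, os.Args[1:])
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(fzf.ExitError)
    }
    code, err := fzf.Run(opts)
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
    }
    os.Exit(code) // ExitOk, ExitNoMatch, ExitError, ExitBecome, or ExitInterrupt
}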
@@ -1,8 +0,0 @@
|
|||||||
// +build !windows
|
|
||||||
|
|
||||||
package fzf
|
|
||||||
|
|
||||||
const (
|
|
||||||
// Reader
|
|
||||||
defaultCommand = `find -L . -path '*/\.*' -prune -o -type f -print -o -type l -print 2> /dev/null | sed s/^..//`
|
|
||||||
)
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
// +build windows
|
|
||||||
|
|
||||||
package fzf
|
|
||||||
|
|
||||||
const (
|
|
||||||
// Reader
|
|
||||||
defaultCommand = `dir /s/b`
|
|
||||||
)
|
|
||||||
src/core.go (504 changes)
@@ -1,35 +1,13 @@
|
|||||||
/*
|
// Package fzf implements fzf, a command-line fuzzy finder.
|
||||||
Package fzf implements fzf, a command-line fuzzy finder.
|
|
||||||
|
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2017 Junegunn Choi
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
*/
|
|
||||||
package fzf
|
package fzf
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"maps"
|
||||||
"os"
|
"os"
|
||||||
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/junegunn/fzf/src/tui"
|
||||||
"github.com/junegunn/fzf/src/util"
|
"github.com/junegunn/fzf/src/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -42,16 +20,52 @@ Matcher -> EvtSearchFin -> Terminal (update list)
|
|||||||
Matcher -> EvtHeader -> Terminal (update header)
|
Matcher -> EvtHeader -> Terminal (update header)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
type revision struct {
|
||||||
|
major int
|
||||||
|
minor int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *revision) bumpMajor() {
|
||||||
|
r.major++
|
||||||
|
r.minor = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *revision) bumpMinor() {
|
||||||
|
r.minor++
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r revision) compatible(other revision) bool {
|
||||||
|
return r.major == other.major
|
||||||
|
}
|
||||||
|
|
||||||
// Run starts fzf
|
// Run starts fzf
|
||||||
func Run(opts *Options) {
|
func Run(opts *Options) (int, error) {
|
||||||
|
if opts.Filter == nil {
|
||||||
|
if opts.useTmux() {
|
||||||
|
return runTmux(os.Args, opts)
|
||||||
|
}
|
||||||
|
|
||||||
|
if needWinpty(opts) {
|
||||||
|
return runWinpty(os.Args, opts)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := postProcessOptions(opts); err != nil {
|
||||||
|
return ExitError, err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer util.RunAtExitFuncs()
|
||||||
|
|
||||||
|
// Output channel given
|
||||||
|
if opts.Output != nil {
|
||||||
|
opts.Printer = func(str string) {
|
||||||
|
opts.Output <- str
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
sort := opts.Sort > 0
|
sort := opts.Sort > 0
|
||||||
sortCriteria = opts.Criteria
|
sortCriteria = opts.Criteria
|
||||||
|
|
||||||
if opts.Version {
|
|
||||||
fmt.Println(version)
|
|
||||||
os.Exit(exitOk)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Event channel
|
// Event channel
|
||||||
eventBox := util.NewEventBox()
|
eventBox := util.NewEventBox()
|
||||||
|
|
||||||
@@ -59,94 +73,166 @@ func Run(opts *Options) {
|
|||||||
ansiProcessor := func(data []byte) (util.Chars, *[]ansiOffset) {
|
ansiProcessor := func(data []byte) (util.Chars, *[]ansiOffset) {
|
||||||
return util.ToChars(data), nil
|
return util.ToChars(data), nil
|
||||||
}
|
}
|
||||||
ansiProcessorRunes := func(data []rune) (util.Chars, *[]ansiOffset) {
|
|
||||||
return util.RunesToChars(data), nil
|
var lineAnsiState, prevLineAnsiState *ansiState
|
||||||
}
|
|
||||||
if opts.Ansi {
|
if opts.Ansi {
|
||||||
if opts.Theme != nil {
|
ansiProcessor = func(data []byte) (util.Chars, *[]ansiOffset) {
|
||||||
var state *ansiState
|
prevLineAnsiState = lineAnsiState
|
||||||
ansiProcessor = func(data []byte) (util.Chars, *[]ansiOffset) {
|
trimmed, offsets, newState := extractColor(byteString(data), lineAnsiState, nil)
|
||||||
trimmed, offsets, newState := extractColor(string(data), state, nil)
|
lineAnsiState = newState
|
||||||
state = newState
|
|
||||||
return util.RunesToChars([]rune(trimmed)), offsets
|
// Full line background is found. Add a special marker.
|
||||||
|
if offsets != nil && newState != nil && len(*offsets) > 0 && newState.lbg >= 0 {
|
||||||
|
marker := (*offsets)[len(*offsets)-1]
|
||||||
|
marker.offset[0] = marker.offset[1]
|
||||||
|
marker.color.bg = newState.lbg
|
||||||
|
marker.color.attr = marker.color.attr | tui.FullBg
|
||||||
|
newOffsets := append(*offsets, marker)
|
||||||
|
offsets = &newOffsets
|
||||||
|
|
||||||
|
// Reset the full-line background color
|
||||||
|
lineAnsiState.lbg = -1
|
||||||
}
|
}
|
||||||
} else {
|
return util.ToChars(stringBytes(trimmed)), offsets
|
||||||
// When color is disabled but ansi option is given,
|
|
||||||
// we simply strip out ANSI codes from the input
|
|
||||||
ansiProcessor = func(data []byte) (util.Chars, *[]ansiOffset) {
|
|
||||||
trimmed, _, _ := extractColor(string(data), nil, nil)
|
|
||||||
return util.RunesToChars([]rune(trimmed)), nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ansiProcessorRunes = func(data []rune) (util.Chars, *[]ansiOffset) {
|
|
||||||
return ansiProcessor([]byte(string(data)))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Chunk list
|
// Chunk list
|
||||||
|
cache := NewChunkCache()
|
||||||
var chunkList *ChunkList
|
var chunkList *ChunkList
|
||||||
|
var itemIndex int32
|
||||||
header := make([]string, 0, opts.HeaderLines)
|
header := make([]string, 0, opts.HeaderLines)
|
||||||
if len(opts.WithNth) == 0 {
|
if opts.WithNth == nil {
|
||||||
chunkList = NewChunkList(func(data []byte, index int) *Item {
|
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
||||||
if len(header) < opts.HeaderLines {
|
if len(header) < opts.HeaderLines {
|
||||||
header = append(header, string(data))
|
header = append(header, byteString(data))
|
||||||
eventBox.Set(EvtHeader, header)
|
eventBox.Set(EvtHeader, header)
|
||||||
return nil
|
return false
|
||||||
}
|
}
|
||||||
chars, colors := ansiProcessor(data)
|
item.text, item.colors = ansiProcessor(data)
|
||||||
return &Item{
|
item.text.Index = itemIndex
|
||||||
index: int32(index),
|
itemIndex++
|
||||||
text: chars,
|
return true
|
||||||
colors: colors}
|
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
chunkList = NewChunkList(func(data []byte, index int) *Item {
|
nthTransformer := opts.WithNth(opts.Delimiter)
|
||||||
tokens := Tokenize(util.ToChars(data), opts.Delimiter)
|
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
||||||
trans := Transform(tokens, opts.WithNth)
|
tokens := Tokenize(byteString(data), opts.Delimiter)
|
||||||
if len(header) < opts.HeaderLines {
|
if opts.Ansi && len(tokens) > 1 {
|
||||||
header = append(header, string(joinTokens(trans)))
|
var ansiState *ansiState
|
||||||
eventBox.Set(EvtHeader, header)
|
if prevLineAnsiState != nil {
|
||||||
return nil
|
ansiStateDup := *prevLineAnsiState
|
||||||
|
ansiState = &ansiStateDup
|
||||||
|
}
|
||||||
|
for _, token := range tokens {
|
||||||
|
prevAnsiState := ansiState
|
||||||
|
_, _, ansiState = extractColor(token.text.ToString(), ansiState, nil)
|
||||||
|
if prevAnsiState != nil {
|
||||||
|
token.text.Prepend("\x1b[m" + prevAnsiState.ToString())
|
||||||
|
} else {
|
||||||
|
token.text.Prepend("\x1b[m")
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
textRunes := joinTokens(trans)
|
transformed := nthTransformer(tokens, itemIndex)
|
||||||
item := Item{
|
if len(header) < opts.HeaderLines {
|
||||||
index: int32(index),
|
header = append(header, transformed)
|
||||||
origText: &data,
|
eventBox.Set(EvtHeader, header)
|
||||||
colors: nil}
|
return false
|
||||||
|
}
|
||||||
|
item.text, item.colors = ansiProcessor(stringBytes(transformed))
|
||||||
|
|
||||||
trimmed, colors := ansiProcessorRunes(textRunes)
|
// We should not trim trailing whitespaces with background colors
|
||||||
item.text = trimmed
|
var maxColorOffset int32
|
||||||
item.colors = colors
|
if item.colors != nil {
|
||||||
return &item
|
for _, ansi := range *item.colors {
|
||||||
|
if ansi.color.bg >= 0 {
|
||||||
|
maxColorOffset = util.Max32(maxColorOffset, ansi.offset[1])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
item.text.TrimTrailingWhitespaces(int(maxColorOffset))
|
||||||
|
item.text.Index = itemIndex
|
||||||
|
item.origText = &data
|
||||||
|
itemIndex++
|
||||||
|
return true
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Process executor
|
||||||
|
executor := util.NewExecutor(opts.WithShell)
|
||||||
|
|
||||||
|
// Terminal I/O
|
||||||
|
var terminal *Terminal
|
||||||
|
var err error
|
||||||
|
var initialEnv []string
|
||||||
|
initialReload := opts.extractReloadOnStart()
|
||||||
|
if opts.Filter == nil {
|
||||||
|
terminal, err = NewTerminal(opts, eventBox, executor)
|
||||||
|
if err != nil {
|
||||||
|
return ExitError, err
|
||||||
|
}
|
||||||
|
if len(initialReload) > 0 {
|
||||||
|
var temps []string
|
||||||
|
initialReload, temps = terminal.replacePlaceholderInInitialCommand(initialReload)
|
||||||
|
initialEnv = terminal.environ()
|
||||||
|
defer removeFiles(temps)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Reader
|
// Reader
|
||||||
streamingFilter := opts.Filter != nil && !sort && !opts.Tac && !opts.Sync
|
streamingFilter := opts.Filter != nil && !sort && !opts.Tac && !opts.Sync
|
||||||
|
var reader *Reader
|
||||||
if !streamingFilter {
|
if !streamingFilter {
|
||||||
reader := Reader{func(data []byte) bool {
|
reader = NewReader(func(data []byte) bool {
|
||||||
return chunkList.Push(data)
|
return chunkList.Push(data)
|
||||||
}, eventBox, opts.ReadZero}
|
}, eventBox, executor, opts.ReadZero, opts.Filter == nil)
|
||||||
go reader.ReadSource()
|
|
||||||
|
readyChan := make(chan bool)
|
||||||
|
go reader.ReadSource(opts.Input, opts.WalkerRoot, opts.WalkerOpts, opts.WalkerSkip, initialReload, initialEnv, readyChan)
|
||||||
|
<-readyChan
|
||||||
}
|
}
|
||||||
|
|
||||||
// Matcher
|
// Matcher
|
||||||
forward := true
|
forward := true
|
||||||
for _, cri := range opts.Criteria[1:] {
|
withPos := false
|
||||||
if cri == byEnd {
|
for idx := len(opts.Criteria) - 1; idx > 0; idx-- {
|
||||||
|
switch opts.Criteria[idx] {
|
||||||
|
case byChunk:
|
||||||
|
withPos = true
|
||||||
|
case byEnd:
|
||||||
|
forward = false
|
||||||
|
case byBegin:
|
||||||
|
forward = true
|
||||||
|
case byPathname:
|
||||||
|
withPos = true
|
||||||
forward = false
|
forward = false
|
||||||
break
|
|
||||||
}
|
}
|
||||||
if cri == byBegin {
|
}
|
||||||
break
|
|
||||||
|
nth := opts.Nth
|
||||||
|
inputRevision := revision{}
|
||||||
|
snapshotRevision := revision{}
|
||||||
|
patternCache := make(map[string]*Pattern)
|
||||||
|
denyMutex := sync.Mutex{}
|
||||||
|
denylist := make(map[int32]struct{})
|
||||||
|
clearDenylist := func() {
|
||||||
|
denyMutex.Lock()
|
||||||
|
if len(denylist) > 0 {
|
||||||
|
patternCache = make(map[string]*Pattern)
|
||||||
}
|
}
|
||||||
|
denylist = make(map[int32]struct{})
|
||||||
|
denyMutex.Unlock()
|
||||||
}
|
}
|
||||||
patternBuilder := func(runes []rune) *Pattern {
|
patternBuilder := func(runes []rune) *Pattern {
|
||||||
return BuildPattern(
|
denyMutex.Lock()
|
||||||
opts.Fuzzy, opts.FuzzyAlgo, opts.Extended, opts.Case, opts.Normalize, forward,
|
denylistCopy := maps.Clone(denylist)
|
||||||
opts.Filter == nil, opts.Nth, opts.Delimiter, runes)
|
denyMutex.Unlock()
|
||||||
|
return BuildPattern(cache, patternCache,
|
||||||
|
opts.Fuzzy, opts.FuzzyAlgo, opts.Extended, opts.Case, opts.Normalize, forward, withPos,
|
||||||
|
opts.Filter == nil, nth, opts.Delimiter, inputRevision, runes, denylistCopy)
|
||||||
}
|
}
|
||||||
matcher := NewMatcher(patternBuilder, sort, opts.Tac, eventBox)
|
matcher := NewMatcher(cache, patternBuilder, sort, opts.Tac, eventBox, inputRevision)
|
||||||
|
|
||||||
// Filtering mode
|
// Filtering mode
|
||||||
if opts.Filter != nil {
|
if opts.Filter != nil {
|
||||||
@@ -155,39 +241,44 @@ func Run(opts *Options) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pattern := patternBuilder([]rune(*opts.Filter))
|
pattern := patternBuilder([]rune(*opts.Filter))
|
||||||
|
matcher.sort = pattern.sortable
|
||||||
|
|
||||||
found := false
|
found := false
|
||||||
if streamingFilter {
|
if streamingFilter {
|
||||||
slab := util.MakeSlab(slab16Size, slab32Size)
|
slab := util.MakeSlab(slab16Size, slab32Size)
|
||||||
reader := Reader{
|
mutex := sync.Mutex{}
|
||||||
|
reader := NewReader(
|
||||||
func(runes []byte) bool {
|
func(runes []byte) bool {
|
||||||
item := chunkList.trans(runes, 0)
|
item := Item{}
|
||||||
if item != nil {
|
if chunkList.trans(&item, runes) {
|
||||||
if result, _, _ := pattern.MatchItem(item, false, slab); result != nil {
|
mutex.Lock()
|
||||||
|
if result, _, _ := pattern.MatchItem(&item, false, slab); result != nil {
|
||||||
opts.Printer(item.text.ToString())
|
opts.Printer(item.text.ToString())
|
||||||
found = true
|
found = true
|
||||||
}
|
}
|
||||||
|
mutex.Unlock()
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
}, eventBox, opts.ReadZero}
|
}, eventBox, executor, opts.ReadZero, false)
|
||||||
reader.ReadSource()
|
reader.ReadSource(opts.Input, opts.WalkerRoot, opts.WalkerOpts, opts.WalkerSkip, initialReload, initialEnv, nil)
|
||||||
} else {
|
} else {
|
||||||
eventBox.Unwatch(EvtReadNew)
|
eventBox.Unwatch(EvtReadNew)
|
||||||
eventBox.WaitFor(EvtReadFin)
|
eventBox.WaitFor(EvtReadFin)
|
||||||
|
|
||||||
snapshot, _ := chunkList.Snapshot()
|
// NOTE: Streaming filter is inherently not compatible with --tail
|
||||||
merger, _ := matcher.scan(MatchRequest{
|
snapshot, _, _ := chunkList.Snapshot(opts.Tail)
|
||||||
|
result := matcher.scan(MatchRequest{
|
||||||
chunks: snapshot,
|
chunks: snapshot,
|
||||||
pattern: pattern})
|
pattern: pattern})
|
||||||
for i := 0; i < merger.Length(); i++ {
|
for i := 0; i < result.merger.Length(); i++ {
|
||||||
opts.Printer(merger.Get(i).item.AsString(opts.Ansi))
|
opts.Printer(result.merger.Get(i).item.AsString(opts.Ansi))
|
||||||
found = true
|
found = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if found {
|
if found {
|
||||||
os.Exit(exitOk)
|
return ExitOk, nil
|
||||||
}
|
}
|
||||||
os.Exit(exitNoMatch)
|
return ExitNoMatch, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Synchronous search
|
// Synchronous search
|
||||||
@@ -198,40 +289,181 @@ func Run(opts *Options) {
|
|||||||
|
|
||||||
// Go interactive
|
// Go interactive
|
||||||
go matcher.Loop()
|
go matcher.Loop()
|
||||||
|
defer matcher.Stop()
|
||||||
|
|
||||||
// Terminal I/O
|
// Handling adaptive height
|
||||||
terminal := NewTerminal(opts, eventBox)
|
maxFit := 0 // Maximum number of items that can fit on screen
|
||||||
deferred := opts.Select1 || opts.Exit0
|
padHeight := 0
|
||||||
|
heightUnknown := opts.Height.auto
|
||||||
|
if heightUnknown {
|
||||||
|
maxFit, padHeight = terminal.MaxFitAndPad()
|
||||||
|
}
|
||||||
|
deferred := opts.Select1 || opts.Exit0 || opts.Sync
|
||||||
go terminal.Loop()
|
go terminal.Loop()
|
||||||
if !deferred {
|
if !deferred && !heightUnknown {
|
||||||
terminal.startChan <- true
|
// Start right away
|
||||||
|
terminal.startChan <- fitpad{-1, -1}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Event coordination
|
// Event coordination
|
||||||
reading := true
|
reading := true
|
||||||
ticks := 0
|
ticks := 0
|
||||||
|
startTick := 0
|
||||||
|
var nextCommand *commandSpec
|
||||||
|
var nextEnviron []string
|
||||||
eventBox.Watch(EvtReadNew)
|
eventBox.Watch(EvtReadNew)
|
||||||
|
total := 0
|
||||||
|
query := []rune{}
|
||||||
|
determine := func(final bool) {
|
||||||
|
if heightUnknown {
|
||||||
|
if total >= maxFit || final {
|
||||||
|
deferred = false
|
||||||
|
heightUnknown = false
|
||||||
|
terminal.startChan <- fitpad{util.Min(total, maxFit), padHeight}
|
||||||
|
}
|
||||||
|
} else if deferred {
|
||||||
|
deferred = false
|
||||||
|
terminal.startChan <- fitpad{-1, -1}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
useSnapshot := false
|
||||||
|
var snapshot []*Chunk
|
||||||
|
var count int
|
||||||
|
restart := func(command commandSpec, environ []string) {
|
||||||
|
if !useSnapshot {
|
||||||
|
clearDenylist()
|
||||||
|
}
|
||||||
|
reading = true
|
||||||
|
startTick = ticks
|
||||||
|
chunkList.Clear()
|
||||||
|
itemIndex = 0
|
||||||
|
inputRevision.bumpMajor()
|
||||||
|
header = make([]string, 0, opts.HeaderLines)
|
||||||
|
readyChan := make(chan bool)
|
||||||
|
go reader.restart(command, environ, readyChan)
|
||||||
|
<-readyChan
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCode := ExitOk
|
||||||
|
stop := false
|
||||||
for {
|
for {
|
||||||
delay := true
|
delay := true
|
||||||
ticks++
|
ticks++
|
||||||
|
input := func() []rune {
|
||||||
|
paused, input := terminal.Input()
|
||||||
|
if !paused {
|
||||||
|
query = input
|
||||||
|
}
|
||||||
|
return query
|
||||||
|
}
|
||||||
eventBox.Wait(func(events *util.Events) {
|
eventBox.Wait(func(events *util.Events) {
|
||||||
defer events.Clear()
|
if _, fin := (*events)[EvtReadFin]; fin {
|
||||||
|
delete(*events, EvtReadNew)
|
||||||
|
}
|
||||||
for evt, value := range *events {
|
for evt, value := range *events {
|
||||||
switch evt {
|
switch evt {
|
||||||
|
case EvtQuit:
|
||||||
|
if reading {
|
||||||
|
reader.terminate()
|
||||||
|
}
|
||||||
|
quitSignal := value.(quitSignal)
|
||||||
|
exitCode = quitSignal.code
|
||||||
|
err = quitSignal.err
|
||||||
|
stop = true
|
||||||
|
return
|
||||||
case EvtReadNew, EvtReadFin:
|
case EvtReadNew, EvtReadFin:
|
||||||
reading = reading && evt == EvtReadNew
|
if evt == EvtReadFin && nextCommand != nil {
|
||||||
snapshot, count := chunkList.Snapshot()
|
restart(*nextCommand, nextEnviron)
|
||||||
terminal.UpdateCount(count, !reading)
|
nextCommand = nil
|
||||||
matcher.Reset(snapshot, terminal.Input(), false, !reading, sort)
|
nextEnviron = nil
|
||||||
|
break
|
||||||
|
} else {
|
||||||
|
reading = reading && evt == EvtReadNew
|
||||||
|
}
|
||||||
|
if useSnapshot && evt == EvtReadFin { // reload-sync
|
||||||
|
clearDenylist()
|
||||||
|
useSnapshot = false
|
||||||
|
}
|
||||||
|
if !useSnapshot {
|
||||||
|
if !snapshotRevision.compatible(inputRevision) {
|
||||||
|
query = []rune{}
|
||||||
|
}
|
||||||
|
var changed bool
|
||||||
|
snapshot, count, changed = chunkList.Snapshot(opts.Tail)
|
||||||
|
if changed {
|
||||||
|
inputRevision.bumpMinor()
|
||||||
|
}
|
||||||
|
snapshotRevision = inputRevision
|
||||||
|
}
|
||||||
|
total = count
|
||||||
|
terminal.UpdateCount(total, !reading, value.(*string))
|
||||||
|
if heightUnknown && !deferred {
|
||||||
|
determine(!reading)
|
||||||
|
}
|
||||||
|
matcher.Reset(snapshot, input(), false, !reading, sort, snapshotRevision)
|
||||||
|
|
||||||
case EvtSearchNew:
|
case EvtSearchNew:
|
||||||
|
var command *commandSpec
|
||||||
|
var environ []string
|
||||||
|
var changed bool
|
||||||
switch val := value.(type) {
|
switch val := value.(type) {
|
||||||
case bool:
|
case searchRequest:
|
||||||
sort = val
|
sort = val.sort
|
||||||
|
command = val.command
|
||||||
|
environ = val.environ
|
||||||
|
changed = val.changed
|
||||||
|
bump := false
|
||||||
|
if len(val.denylist) > 0 && val.revision.compatible(inputRevision) {
|
||||||
|
denyMutex.Lock()
|
||||||
|
for _, itemIndex := range val.denylist {
|
||||||
|
denylist[itemIndex] = struct{}{}
|
||||||
|
}
|
||||||
|
denyMutex.Unlock()
|
||||||
|
bump = true
|
||||||
|
}
|
||||||
|
if val.nth != nil {
|
||||||
|
// Change nth and clear caches
|
||||||
|
nth = *val.nth
|
||||||
|
bump = true
|
||||||
|
}
|
||||||
|
if bump {
|
||||||
|
patternCache = make(map[string]*Pattern)
|
||||||
|
cache.Clear()
|
||||||
|
inputRevision.bumpMinor()
|
||||||
|
}
|
||||||
|
if command != nil {
|
||||||
|
useSnapshot = val.sync
|
||||||
|
}
|
||||||
}
|
}
|
||||||
snapshot, _ := chunkList.Snapshot()
|
if command != nil {
|
||||||
matcher.Reset(snapshot, terminal.Input(), true, !reading, sort)
|
if reading {
|
||||||
|
reader.terminate()
|
||||||
|
nextCommand = command
|
||||||
|
nextEnviron = environ
|
||||||
|
} else {
|
||||||
|
restart(*command, environ)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if !useSnapshot {
|
||||||
|
newSnapshot, newCount, changed := chunkList.Snapshot(opts.Tail)
|
||||||
|
if changed {
|
||||||
|
inputRevision.bumpMinor()
|
||||||
|
}
|
||||||
|
// We want to avoid showing empty list when reload is triggered
|
||||||
|
// and the query string is changed at the same time i.e. command != nil && changed
|
||||||
|
if command == nil || newCount > 0 {
|
||||||
|
if snapshotRevision != inputRevision {
|
||||||
|
query = []rune{}
|
||||||
|
}
|
||||||
|
snapshot = newSnapshot
|
||||||
|
snapshotRevision = inputRevision
|
||||||
|
}
|
||||||
|
}
|
||||||
|
matcher.Reset(snapshot, input(), true, !reading, sort, snapshotRevision)
|
||||||
delay = false
|
delay = false
|
||||||
|
|
||||||
case EvtSearchProgress:
|
case EvtSearchProgress:
|
||||||
@@ -241,17 +473,19 @@ func Run(opts *Options) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
case EvtHeader:
|
case EvtHeader:
|
||||||
terminal.UpdateHeader(value.([]string))
|
headerPadded := make([]string, opts.HeaderLines)
|
||||||
|
copy(headerPadded, value.([]string))
|
||||||
|
terminal.UpdateHeader(headerPadded)
|
||||||
|
|
||||||
case EvtSearchFin:
|
case EvtSearchFin:
|
||||||
switch val := value.(type) {
|
switch val := value.(type) {
|
||||||
case *Merger:
|
case MatchResult:
|
||||||
|
merger := val.merger
|
||||||
if deferred {
|
if deferred {
|
||||||
count := val.Length()
|
count := merger.Length()
|
||||||
if opts.Select1 && count > 1 || opts.Exit0 && !opts.Select1 && count > 0 {
|
if opts.Select1 && count > 1 || opts.Exit0 && !opts.Select1 && count > 0 {
|
||||||
deferred = false
|
determine(merger.final)
|
||||||
terminal.startChan <- true
|
} else if merger.final {
|
||||||
} else if val.final {
|
|
||||||
if opts.Exit0 && count == 0 || opts.Select1 && count == 1 {
|
if opts.Exit0 && count == 0 || opts.Select1 && count == 1 {
|
||||||
if opts.PrintQuery {
|
if opts.PrintQuery {
|
||||||
opts.Printer(opts.Query)
|
opts.Printer(opts.Query)
|
||||||
@@ -259,28 +493,42 @@ func Run(opts *Options) {
|
|||||||
if len(opts.Expect) > 0 {
|
if len(opts.Expect) > 0 {
|
||||||
opts.Printer("")
|
opts.Printer("")
|
||||||
}
|
}
|
||||||
|
transformer := func(item *Item) string {
|
||||||
|
return item.AsString(opts.Ansi)
|
||||||
|
}
|
||||||
|
if opts.AcceptNth != nil {
|
||||||
|
fn := opts.AcceptNth(opts.Delimiter)
|
||||||
|
transformer = func(item *Item) string {
|
||||||
|
return item.acceptNth(opts.Ansi, opts.Delimiter, fn)
|
||||||
|
}
|
||||||
|
}
|
||||||
for i := 0; i < count; i++ {
|
for i := 0; i < count; i++ {
|
||||||
opts.Printer(val.Get(i).item.AsString(opts.Ansi))
|
opts.Printer(transformer(merger.Get(i).item))
|
||||||
}
|
}
|
||||||
if count > 0 {
|
if count == 0 {
|
||||||
os.Exit(exitOk)
|
exitCode = ExitNoMatch
|
||||||
}
|
}
|
||||||
os.Exit(exitNoMatch)
|
stop = true
|
||||||
|
return
|
||||||
}
|
}
|
||||||
deferred = false
|
determine(merger.final)
|
||||||
terminal.startChan <- true
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
terminal.UpdateList(val)
|
terminal.UpdateList(val)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
events.Clear()
|
||||||
})
|
})
|
||||||
|
if stop {
|
||||||
|
break
|
||||||
|
}
|
||||||
if delay && reading {
|
if delay && reading {
|
||||||
dur := util.DurWithin(
|
dur := util.DurWithin(
|
||||||
time.Duration(ticks)*coordinatorDelayStep,
|
time.Duration(ticks-startTick)*coordinatorDelayStep,
|
||||||
0, coordinatorDelayMax)
|
0, coordinatorDelayMax)
|
||||||
time.Sleep(dur)
|
time.Sleep(dur)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return exitCode, err
|
||||||
}
|
}
|
||||||
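The core.go diff above introduces a revision value that tags every chunk snapshot: a major bump marks a brand-new input (reload), while a minor bump marks an in-place change to the same input, and caches keyed by an old revision are dropped accordingly. A standalone sketch of that behaviour, using the struct exactly as it appears in the diff:

package main

import "fmt"

type revision struct {
	major int
	minor int
}

func (r *revision) bumpMajor() { r.major++; r.minor = 0 }
func (r *revision) bumpMinor() { r.minor++ }

// compatible: two revisions describe the same input stream as long as
// their major numbers match, regardless of minor bumps in between.
func (r revision) compatible(other revision) bool { return r.major == other.major }

func main() {
	input := revision{}
	snapshot := input // revision recorded when the matcher last scanned

	input.bumpMinor() // same input grew or was trimmed (e.g. --tail)
	fmt.Println(snapshot.compatible(input)) // true: query and chunk cache survive

	input.bumpMajor() // reload with a new command: a different input
	fmt.Println(snapshot.compatible(input)) // false: everything is invalidated
}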
src/deps (18 lines changed)
@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-
-if [ -z "$GOPATH" ]; then
-  echo '$GOPATH not defined'
-  exit 1
-fi
-
-reset() (
-  cd "$GOPATH/src/$1"
-  export GIT_DIR="$(pwd)/.git"
-  [ "$(git rev-parse HEAD)" = "$2" ] ||
-    (git fetch && git reset --hard "$2")
-)
-
-reset github.com/junegunn/go-isatty 66b8e73f3f5cda9f96b69efd03dd3d7fc4a5cdb8
-reset github.com/junegunn/go-runewidth 63c378b851290989b19ca955468386485f118c65
-reset github.com/junegunn/go-shellwords 33bd8f1ebe16d6e5eb688cc885749a63059e9167
-reset golang.org/x/crypto abc5fa7ad02123a41f02bf1391c9760f7586e608
src/functions.go (new file, 35 lines)
@@ -0,0 +1,35 @@
+package fzf
+
+import (
+	"os"
+	"strings"
+	"unsafe"
+)
+
+func WriteTemporaryFile(data []string, printSep string) string {
+	f, err := os.CreateTemp("", "fzf-temp-*")
+	if err != nil {
+		// Unable to create temporary file
+		// FIXME: Should we terminate the program?
+		return ""
+	}
+	defer f.Close()
+
+	f.WriteString(strings.Join(data, printSep))
+	f.WriteString(printSep)
+	return f.Name()
+}
+
+func removeFiles(files []string) {
+	for _, filename := range files {
+		os.Remove(filename)
+	}
+}
+
+func stringBytes(data string) []byte {
+	return unsafe.Slice(unsafe.StringData(data), len(data))
+}
+
+func byteString(data []byte) string {
+	return unsafe.String(unsafe.SliceData(data), len(data))
+}
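The stringBytes/byteString helpers above rely on the unsafe conversions added in Go 1.20 to move between string and []byte without copying; the returned value aliases the original memory, so it must be treated as read-only. A short usage sketch:

package main

import (
	"fmt"
	"unsafe"
)

func stringBytes(data string) []byte {
	return unsafe.Slice(unsafe.StringData(data), len(data))
}

func byteString(data []byte) string {
	return unsafe.String(unsafe.SliceData(data), len(data))
}

func main() {
	line := []byte("src/core.go:42")
	s := byteString(line) // zero-copy view of the byte slice
	fmt.Println(len(s), s)

	b := stringBytes("hello") // aliases string storage: never write to b
	fmt.Println(b[0], len(b))
}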
@@ -1,7 +0,0 @@
-package main
-
-import "github.com/junegunn/fzf/src"
-
-func main() {
-	fzf.Run(fzf.ParseOptions())
-}
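The deleted wrapper above predates the signature change in core.go, where Run now returns (int, error) instead of calling os.Exit itself. A hedged sketch of how a caller might drive the new signature; the ParseOptions call and its error return are assumptions for illustration, not the exact current API.

package main

import (
	"fmt"
	"os"

	"github.com/junegunn/fzf/src"
)

func main() {
	// Hypothetical: assumes ParseOptions() returns (*fzf.Options, error).
	opts, err := fzf.ParseOptions()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(fzf.ExitError)
	}
	// Run reports the exit code instead of exiting on its own.
	code, err := fzf.Run(opts)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
	os.Exit(code)
}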
@@ -2,7 +2,6 @@ package fzf
 
 import (
 	"errors"
-	"io/ioutil"
 	"os"
 	"strings"
 )
@@ -26,12 +25,12 @@ func NewHistory(path string, maxSize int) (*History, error) {
 	}
 
 	// Read history file
-	data, err := ioutil.ReadFile(path)
+	data, err := os.ReadFile(path)
 	if err != nil {
 		// If it doesn't exist, check if we can create a file with the name
 		if os.IsNotExist(err) {
 			data = []byte{}
-			if err := ioutil.WriteFile(path, data, 0600); err != nil {
+			if err := os.WriteFile(path, data, 0600); err != nil {
 				return nil, fmtError(err)
 			}
 		} else {
@@ -59,14 +58,14 @@ func (h *History) append(line string) error {
 
 	lines := append(h.lines[:len(h.lines)-1], line)
 	if len(lines) > h.maxSize {
-		lines = lines[len(lines)-h.maxSize : len(lines)]
+		lines = lines[len(lines)-h.maxSize:]
 	}
 	h.lines = append(lines, "")
-	return ioutil.WriteFile(h.path, []byte(strings.Join(h.lines, "\n")), 0600)
+	return os.WriteFile(h.path, []byte(strings.Join(h.lines, "\n")), 0600)
 }
 
 func (h *History) override(str string) {
-	// You can update the history but they're not written to the file
+	// You can update the history, but they're not written to the file
 	if h.cursor == len(h.lines)-1 {
 		h.lines[h.cursor] = str
 	} else if h.cursor < len(h.lines)-1 {
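The history.go hunks above (and the test changes below) swap the deprecated io/ioutil helpers for their os equivalents from Go 1.16: ioutil.ReadFile becomes os.ReadFile, ioutil.WriteFile becomes os.WriteFile, and ioutil.TempFile becomes os.CreateTemp. A minimal sketch of the modern calls:

package main

import (
	"fmt"
	"os"
)

func main() {
	// os.CreateTemp replaces ioutil.TempFile.
	f, err := os.CreateTemp("", "fzf-history-*")
	if err != nil {
		panic(err)
	}
	path := f.Name()
	f.Close()
	defer os.Remove(path)

	// os.WriteFile replaces ioutil.WriteFile.
	if err := os.WriteFile(path, []byte("query-1\nquery-2\n"), 0600); err != nil {
		panic(err)
	}

	// os.ReadFile replaces ioutil.ReadFile.
	data, err := os.ReadFile(path)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(data))
}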
@@ -1,9 +1,7 @@
 package fzf
 
 import (
-	"io/ioutil"
 	"os"
-	"os/user"
 	"runtime"
 	"testing"
 )
@@ -12,16 +10,12 @@ func TestHistory(t *testing.T) {
 	maxHistory := 50
 
 	// Invalid arguments
-	user, _ := user.Current()
 	var paths []string
 	if runtime.GOOS == "windows" {
 		// GOPATH should exist, so we shouldn't be able to override it
 		paths = []string{os.Getenv("GOPATH")}
 	} else {
 		paths = []string{"/etc", "/proc"}
-		if user.Name != "root" {
-			paths = append(paths, "/etc/sudoers")
-		}
 	}
 
 	for _, path := range paths {
@@ -30,7 +24,7 @@ func TestHistory(t *testing.T) {
 		}
 	}
 
-	f, _ := ioutil.TempFile("", "fzf-history")
+	f, _ := os.CreateTemp("", "fzf-history")
 	f.Close()
 
 	{ // Append lines
src/item.go (34 lines changed)
|
|||||||
package fzf
|
package fzf
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"math"
|
||||||
|
|
||||||
"github.com/junegunn/fzf/src/util"
|
"github.com/junegunn/fzf/src/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Item represents each input line
|
type transformed struct {
|
||||||
|
// Because nth can be changed dynamically by change-nth action, we need to
|
||||||
|
// keep the revision number at the time of transformation.
|
||||||
|
revision revision
|
||||||
|
tokens []Token
|
||||||
|
}
|
||||||
|
|
||||||
|
// Item represents each input line. 56 bytes.
|
||||||
type Item struct {
|
type Item struct {
|
||||||
index int32
|
text util.Chars // 32 = 24 + 1 + 1 + 2 + 4
|
||||||
text util.Chars
|
transformed *transformed // 8
|
||||||
origText *[]byte
|
origText *[]byte // 8
|
||||||
colors *[]ansiOffset
|
colors *[]ansiOffset // 8
|
||||||
transformed []Token
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Index returns ordinal index of the Item
|
// Index returns ordinal index of the Item
|
||||||
func (item *Item) Index() int32 {
|
func (item *Item) Index() int32 {
|
||||||
return item.index
|
return item.text.Index
|
||||||
|
}
|
||||||
|
|
||||||
|
var minItem = Item{text: util.Chars{Index: math.MinInt32}}
|
||||||
|
|
||||||
|
func (item *Item) TrimLength() uint16 {
|
||||||
|
return item.text.TrimLength()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Colors returns ansiOffsets of the Item
|
// Colors returns ansiOffsets of the Item
|
||||||
@@ -37,3 +51,9 @@ func (item *Item) AsString(stripAnsi bool) string {
|
|||||||
}
|
}
|
||||||
return item.text.ToString()
|
return item.text.ToString()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (item *Item) acceptNth(stripAnsi bool, delimiter Delimiter, transformer func([]Token, int32) string) string {
|
||||||
|
tokens := Tokenize(item.AsString(stripAnsi), delimiter)
|
||||||
|
transformed := transformer(tokens, item.Index())
|
||||||
|
return StripLastDelimiter(transformed, delimiter)
|
||||||
|
}
|
||||||
src/matcher.go (130 lines changed)
@@ -12,14 +12,30 @@ import (
|
|||||||
|
|
||||||
// MatchRequest represents a search request
|
// MatchRequest represents a search request
|
||||||
type MatchRequest struct {
|
type MatchRequest struct {
|
||||||
chunks []*Chunk
|
chunks []*Chunk
|
||||||
pattern *Pattern
|
pattern *Pattern
|
||||||
final bool
|
final bool
|
||||||
sort bool
|
sort bool
|
||||||
|
revision revision
|
||||||
|
}
|
||||||
|
|
||||||
|
type MatchResult struct {
|
||||||
|
merger *Merger
|
||||||
|
passMerger *Merger
|
||||||
|
cancelled bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mr MatchResult) cacheable() bool {
|
||||||
|
return mr.merger != nil && mr.merger.cacheable()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mr MatchResult) final() bool {
|
||||||
|
return mr.merger != nil && mr.merger.final
|
||||||
}
|
}
|
||||||
|
|
||||||
// Matcher is responsible for performing search
|
// Matcher is responsible for performing search
|
||||||
type Matcher struct {
|
type Matcher struct {
|
||||||
|
cache *ChunkCache
|
||||||
patternBuilder func([]rune) *Pattern
|
patternBuilder func([]rune) *Pattern
|
||||||
sort bool
|
sort bool
|
||||||
tac bool
|
tac bool
|
||||||
@@ -27,7 +43,8 @@ type Matcher struct {
|
|||||||
reqBox *util.EventBox
|
reqBox *util.EventBox
|
||||||
partitions int
|
partitions int
|
||||||
slab []*util.Slab
|
slab []*util.Slab
|
||||||
mergerCache map[string]*Merger
|
mergerCache map[string]MatchResult
|
||||||
|
revision revision
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@@ -36,10 +53,11 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
// NewMatcher returns a new Matcher
|
// NewMatcher returns a new Matcher
|
||||||
func NewMatcher(patternBuilder func([]rune) *Pattern,
|
func NewMatcher(cache *ChunkCache, patternBuilder func([]rune) *Pattern,
|
||||||
sort bool, tac bool, eventBox *util.EventBox) *Matcher {
|
sort bool, tac bool, eventBox *util.EventBox, revision revision) *Matcher {
|
||||||
partitions := util.Min(numPartitionsMultiplier*runtime.NumCPU(), maxPartitions)
|
partitions := util.Min(numPartitionsMultiplier*runtime.NumCPU(), maxPartitions)
|
||||||
return &Matcher{
|
return &Matcher{
|
||||||
|
cache: cache,
|
||||||
patternBuilder: patternBuilder,
|
patternBuilder: patternBuilder,
|
||||||
sort: sort,
|
sort: sort,
|
||||||
tac: tac,
|
tac: tac,
|
||||||
@@ -47,7 +65,8 @@ func NewMatcher(patternBuilder func([]rune) *Pattern,
|
|||||||
reqBox: util.NewEventBox(),
|
reqBox: util.NewEventBox(),
|
||||||
partitions: partitions,
|
partitions: partitions,
|
||||||
slab: make([]*util.Slab, partitions),
|
slab: make([]*util.Slab, partitions),
|
||||||
mergerCache: make(map[string]*Merger)}
|
mergerCache: make(map[string]MatchResult),
|
||||||
|
revision: revision}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Loop puts Matcher in action
|
// Loop puts Matcher in action
|
||||||
@@ -57,8 +76,13 @@ func (m *Matcher) Loop() {
|
|||||||
for {
|
for {
|
||||||
var request MatchRequest
|
var request MatchRequest
|
||||||
|
|
||||||
|
stop := false
|
||||||
m.reqBox.Wait(func(events *util.Events) {
|
m.reqBox.Wait(func(events *util.Events) {
|
||||||
for _, val := range *events {
|
for t, val := range *events {
|
||||||
|
if t == reqQuit {
|
||||||
|
stop = true
|
||||||
|
return
|
||||||
|
}
|
||||||
switch val := val.(type) {
|
switch val := val.(type) {
|
||||||
case MatchRequest:
|
case MatchRequest:
|
||||||
request = val
|
request = val
|
||||||
@@ -68,42 +92,49 @@ func (m *Matcher) Loop() {
|
|||||||
}
|
}
|
||||||
events.Clear()
|
events.Clear()
|
||||||
})
|
})
|
||||||
|
if stop {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
if request.sort != m.sort {
|
cacheCleared := false
|
||||||
|
if request.sort != m.sort || request.revision != m.revision {
|
||||||
m.sort = request.sort
|
m.sort = request.sort
|
||||||
m.mergerCache = make(map[string]*Merger)
|
m.mergerCache = make(map[string]MatchResult)
|
||||||
clearChunkCache()
|
if !request.revision.compatible(m.revision) {
|
||||||
|
m.cache.Clear()
|
||||||
|
}
|
||||||
|
m.revision = request.revision
|
||||||
|
cacheCleared = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Restart search
|
// Restart search
|
||||||
patternString := request.pattern.AsString()
|
patternString := request.pattern.AsString()
|
||||||
var merger *Merger
|
var result MatchResult
|
||||||
cancelled := false
|
|
||||||
count := CountItems(request.chunks)
|
count := CountItems(request.chunks)
|
||||||
|
|
||||||
foundCache := false
|
if !cacheCleared {
|
||||||
if count == prevCount {
|
if count == prevCount {
|
||||||
// Look up mergerCache
|
// Look up mergerCache
|
||||||
if cached, found := m.mergerCache[patternString]; found {
|
if cached, found := m.mergerCache[patternString]; found && cached.final() == request.final {
|
||||||
foundCache = true
|
result = cached
|
||||||
merger = cached
|
}
|
||||||
|
} else {
|
||||||
|
// Invalidate mergerCache
|
||||||
|
prevCount = count
|
||||||
|
m.mergerCache = make(map[string]MatchResult)
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
// Invalidate mergerCache
|
|
||||||
prevCount = count
|
|
||||||
m.mergerCache = make(map[string]*Merger)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if !foundCache {
|
if result.merger == nil {
|
||||||
merger, cancelled = m.scan(request)
|
result = m.scan(request)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !cancelled {
|
if !result.cancelled {
|
||||||
if merger.cacheable() {
|
if result.cacheable() {
|
||||||
m.mergerCache[patternString] = merger
|
m.mergerCache[patternString] = result
|
||||||
}
|
}
|
||||||
merger.final = request.final
|
result.merger.final = request.final
|
||||||
m.eventBox.Set(EvtSearchFin, merger)
|
m.eventBox.Set(EvtSearchFin, result)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -131,21 +162,25 @@ func (m *Matcher) sliceChunks(chunks []*Chunk) [][]*Chunk {
|
|||||||
|
|
||||||
type partialResult struct {
|
type partialResult struct {
|
||||||
index int
|
index int
|
||||||
matches []*Result
|
matches []Result
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
func (m *Matcher) scan(request MatchRequest) MatchResult {
|
||||||
startedAt := time.Now()
|
startedAt := time.Now()
|
||||||
|
|
||||||
numChunks := len(request.chunks)
|
numChunks := len(request.chunks)
|
||||||
if numChunks == 0 {
|
if numChunks == 0 {
|
||||||
return EmptyMerger, false
|
m := EmptyMerger(request.revision)
|
||||||
|
return MatchResult{m, m, false}
|
||||||
}
|
}
|
||||||
pattern := request.pattern
|
pattern := request.pattern
|
||||||
|
passMerger := PassMerger(&request.chunks, m.tac, request.revision)
|
||||||
if pattern.IsEmpty() {
|
if pattern.IsEmpty() {
|
||||||
return PassMerger(&request.chunks, m.tac), false
|
return MatchResult{passMerger, passMerger, false}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
minIndex := request.chunks[0].items[0].Index()
|
||||||
|
maxIndex := request.chunks[numChunks-1].lastIndex(minIndex)
|
||||||
cancelled := util.NewAtomicBool(false)
|
cancelled := util.NewAtomicBool(false)
|
||||||
|
|
||||||
slices := m.sliceChunks(request.chunks)
|
slices := m.sliceChunks(request.chunks)
|
||||||
@@ -162,7 +197,7 @@ func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
|||||||
go func(idx int, slab *util.Slab, chunks []*Chunk) {
|
go func(idx int, slab *util.Slab, chunks []*Chunk) {
|
||||||
defer func() { waitGroup.Done() }()
|
defer func() { waitGroup.Done() }()
|
||||||
count := 0
|
count := 0
|
||||||
allMatches := make([][]*Result, len(chunks))
|
allMatches := make([][]Result, len(chunks))
|
||||||
for idx, chunk := range chunks {
|
for idx, chunk := range chunks {
|
||||||
matches := request.pattern.Match(chunk, slab)
|
matches := request.pattern.Match(chunk, slab)
|
||||||
allMatches[idx] = matches
|
allMatches[idx] = matches
|
||||||
@@ -172,11 +207,11 @@ func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
|||||||
}
|
}
|
||||||
countChan <- len(matches)
|
countChan <- len(matches)
|
||||||
}
|
}
|
||||||
sliceMatches := make([]*Result, 0, count)
|
sliceMatches := make([]Result, 0, count)
|
||||||
for _, matches := range allMatches {
|
for _, matches := range allMatches {
|
||||||
sliceMatches = append(sliceMatches, matches...)
|
sliceMatches = append(sliceMatches, matches...)
|
||||||
}
|
}
|
||||||
if m.sort {
|
if m.sort && request.pattern.sortable {
|
||||||
if m.tac {
|
if m.tac {
|
||||||
sort.Sort(ByRelevanceTac(sliceMatches))
|
sort.Sort(ByRelevanceTac(sliceMatches))
|
||||||
} else {
|
} else {
|
||||||
@@ -204,24 +239,25 @@ func (m *Matcher) scan(request MatchRequest) (*Merger, bool) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if m.reqBox.Peek(reqReset) {
|
if m.reqBox.Peek(reqReset) {
|
||||||
return nil, wait()
|
return MatchResult{nil, nil, wait()}
|
||||||
}
|
}
|
||||||
|
|
||||||
if time.Now().Sub(startedAt) > progressMinDuration {
|
if time.Since(startedAt) > progressMinDuration {
|
||||||
m.eventBox.Set(EvtSearchProgress, float32(count)/float32(numChunks))
|
m.eventBox.Set(EvtSearchProgress, float32(count)/float32(numChunks))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
partialResults := make([][]*Result, numSlices)
|
partialResults := make([][]Result, numSlices)
|
||||||
for _ = range slices {
|
for range slices {
|
||||||
partialResult := <-resultChan
|
partialResult := <-resultChan
|
||||||
partialResults[partialResult.index] = partialResult.matches
|
partialResults[partialResult.index] = partialResult.matches
|
||||||
}
|
}
|
||||||
return NewMerger(pattern, partialResults, m.sort, m.tac), false
|
merger := NewMerger(pattern, partialResults, m.sort && request.pattern.sortable, m.tac, request.revision, minIndex, maxIndex)
|
||||||
|
return MatchResult{merger, passMerger, false}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Reset is called to interrupt/signal the ongoing search
|
// Reset is called to interrupt/signal the ongoing search
|
||||||
func (m *Matcher) Reset(chunks []*Chunk, patternRunes []rune, cancel bool, final bool, sort bool) {
|
func (m *Matcher) Reset(chunks []*Chunk, patternRunes []rune, cancel bool, final bool, sort bool, revision revision) {
|
||||||
pattern := m.patternBuilder(patternRunes)
|
pattern := m.patternBuilder(patternRunes)
|
||||||
|
|
||||||
var event util.EventType
|
var event util.EventType
|
||||||
@@ -230,5 +266,9 @@ func (m *Matcher) Reset(chunks []*Chunk, patternRunes []rune, cancel bool, final
|
|||||||
} else {
|
} else {
|
||||||
event = reqRetry
|
event = reqRetry
|
||||||
}
|
}
|
||||||
m.reqBox.Set(event, MatchRequest{chunks, pattern, final, sort})
|
m.reqBox.Set(event, MatchRequest{chunks, pattern, final, sort, revision})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Matcher) Stop() {
|
||||||
|
m.reqBox.Set(reqQuit, nil)
|
||||||
}
|
}
|
||||||
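In the matcher.go diff above, the merger cache is keyed by the pattern string and is thrown away whenever the request carries a different revision; the chunk cache is additionally cleared when the new revision is not even major-compatible with the old one. A simplified standalone sketch of that invalidation rule; the cache type here is an illustrative stand-in, not the real Matcher.

package main

import "fmt"

type revision struct{ major, minor int }

// resultCache is a toy stand-in for Matcher.mergerCache.
type resultCache struct {
	revision revision
	results  map[string][]string
}

func (c *resultCache) get(rev revision, pattern string) ([]string, bool) {
	if rev != c.revision {
		// Any revision change invalidates cached match results.
		// (The real code also clears the chunk cache when the new
		// revision has a different major number.)
		c.results = make(map[string][]string)
		c.revision = rev
	}
	r, ok := c.results[pattern]
	return r, ok
}

func (c *resultCache) put(pattern string, r []string) { c.results[pattern] = r }

func main() {
	c := &resultCache{results: map[string][]string{}}
	rev := revision{}

	c.put("core", []string{"src/core.go"})
	if _, ok := c.get(rev, "core"); ok {
		fmt.Println("cache hit at", rev)
	}

	rev.minor++ // same input changed in place, e.g. more lines were read
	if _, ok := c.get(rev, "core"); !ok {
		fmt.Println("cache miss after minor bump", rev)
	}
}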
src/merger.go (126 lines changed)
@@ -3,49 +3,67 @@ package fzf
|
|||||||
import "fmt"
|
import "fmt"
|
||||||
|
|
||||||
// EmptyMerger is a Merger with no data
|
// EmptyMerger is a Merger with no data
|
||||||
var EmptyMerger = NewMerger(nil, [][]*Result{}, false, false)
|
func EmptyMerger(revision revision) *Merger {
|
||||||
|
return NewMerger(nil, [][]Result{}, false, false, revision, 0, 0)
|
||||||
|
}
|
||||||
|
|
||||||
// Merger holds a set of locally sorted lists of items and provides the view of
|
// Merger holds a set of locally sorted lists of items and provides the view of
|
||||||
// a single, globally-sorted list
|
// a single, globally-sorted list
|
||||||
type Merger struct {
|
type Merger struct {
|
||||||
pattern *Pattern
|
pattern *Pattern
|
||||||
lists [][]*Result
|
lists [][]Result
|
||||||
merged []*Result
|
merged []Result
|
||||||
chunks *[]*Chunk
|
chunks *[]*Chunk
|
||||||
cursors []int
|
cursors []int
|
||||||
sorted bool
|
sorted bool
|
||||||
tac bool
|
tac bool
|
||||||
final bool
|
final bool
|
||||||
count int
|
count int
|
||||||
|
pass bool
|
||||||
|
revision revision
|
||||||
|
minIndex int32
|
||||||
|
maxIndex int32
|
||||||
}
|
}
|
||||||
|
|
||||||
// PassMerger returns a new Merger that simply returns the items in the
|
// PassMerger returns a new Merger that simply returns the items in the
|
||||||
// original order
|
// original order
|
||||||
func PassMerger(chunks *[]*Chunk, tac bool) *Merger {
|
func PassMerger(chunks *[]*Chunk, tac bool, revision revision) *Merger {
|
||||||
|
var minIndex, maxIndex int32
|
||||||
|
if len(*chunks) > 0 {
|
||||||
|
minIndex = (*chunks)[0].items[0].Index()
|
||||||
|
maxIndex = (*chunks)[len(*chunks)-1].lastIndex(minIndex)
|
||||||
|
}
|
||||||
mg := Merger{
|
mg := Merger{
|
||||||
pattern: nil,
|
pattern: nil,
|
||||||
chunks: chunks,
|
chunks: chunks,
|
||||||
tac: tac,
|
tac: tac,
|
||||||
count: 0}
|
count: 0,
|
||||||
|
pass: true,
|
||||||
|
revision: revision,
|
||||||
|
minIndex: minIndex,
|
||||||
|
maxIndex: maxIndex}
|
||||||
|
|
||||||
for _, chunk := range *mg.chunks {
|
for _, chunk := range *mg.chunks {
|
||||||
mg.count += len(*chunk)
|
mg.count += chunk.count
|
||||||
}
|
}
|
||||||
return &mg
|
return &mg
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewMerger returns a new Merger
|
// NewMerger returns a new Merger
|
||||||
func NewMerger(pattern *Pattern, lists [][]*Result, sorted bool, tac bool) *Merger {
|
func NewMerger(pattern *Pattern, lists [][]Result, sorted bool, tac bool, revision revision, minIndex int32, maxIndex int32) *Merger {
|
||||||
mg := Merger{
|
mg := Merger{
|
||||||
pattern: pattern,
|
pattern: pattern,
|
||||||
lists: lists,
|
lists: lists,
|
||||||
merged: []*Result{},
|
merged: []Result{},
|
||||||
chunks: nil,
|
chunks: nil,
|
||||||
cursors: make([]int, len(lists)),
|
cursors: make([]int, len(lists)),
|
||||||
sorted: sorted,
|
sorted: sorted,
|
||||||
tac: tac,
|
tac: tac,
|
||||||
final: false,
|
final: false,
|
||||||
count: 0}
|
count: 0,
|
||||||
|
revision: revision,
|
||||||
|
minIndex: minIndex,
|
||||||
|
maxIndex: maxIndex}
|
||||||
|
|
||||||
for _, list := range mg.lists {
|
for _, list := range mg.lists {
|
||||||
mg.count += len(list)
|
mg.count += len(list)
|
||||||
@@ -53,19 +71,57 @@ func NewMerger(pattern *Pattern, lists [][]*Result, sorted bool, tac bool) *Merg
|
|||||||
return &mg
|
return &mg
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Revision returns revision number
|
||||||
|
func (mg *Merger) Revision() revision {
|
||||||
|
return mg.revision
|
||||||
|
}
|
||||||
|
|
||||||
// Length returns the number of items
|
// Length returns the number of items
|
||||||
func (mg *Merger) Length() int {
|
func (mg *Merger) Length() int {
|
||||||
return mg.count
|
return mg.count
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (mg *Merger) First() Result {
|
||||||
|
if mg.tac && !mg.sorted {
|
||||||
|
return mg.Get(mg.count - 1)
|
||||||
|
}
|
||||||
|
return mg.Get(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// FindIndex returns the index of the item with the given item index
|
||||||
|
func (mg *Merger) FindIndex(itemIndex int32) int {
|
||||||
|
index := -1
|
||||||
|
if mg.pass {
|
||||||
|
index = int(itemIndex - mg.minIndex)
|
||||||
|
if mg.tac {
|
||||||
|
index = mg.count - index - 1
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for i := 0; i < mg.count; i++ {
|
||||||
|
if mg.Get(i).item.Index() == itemIndex {
|
||||||
|
index = i
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return index
|
||||||
|
}
|
||||||
|
|
||||||
// Get returns the pointer to the Result object indexed by the given integer
|
// Get returns the pointer to the Result object indexed by the given integer
|
||||||
func (mg *Merger) Get(idx int) *Result {
|
func (mg *Merger) Get(idx int) Result {
|
||||||
if mg.chunks != nil {
|
if mg.chunks != nil {
|
||||||
if mg.tac {
|
if mg.tac {
|
||||||
idx = mg.count - idx - 1
|
idx = mg.count - idx - 1
|
||||||
}
|
}
|
||||||
|
firstChunk := (*mg.chunks)[0]
|
||||||
|
if firstChunk.count < chunkSize && idx >= firstChunk.count {
|
||||||
|
idx -= firstChunk.count
|
||||||
|
|
||||||
|
chunk := (*mg.chunks)[idx/chunkSize+1]
|
||||||
|
return Result{item: &chunk.items[idx%chunkSize]}
|
||||||
|
}
|
||||||
chunk := (*mg.chunks)[idx/chunkSize]
|
chunk := (*mg.chunks)[idx/chunkSize]
|
||||||
return &Result{item: (*chunk)[idx%chunkSize]}
|
return Result{item: &chunk.items[idx%chunkSize]}
|
||||||
}
|
}
|
||||||
|
|
||||||
if mg.sorted {
|
if mg.sorted {
|
||||||
@@ -85,11 +141,20 @@ func (mg *Merger) Get(idx int) *Result {
|
|||||||
panic(fmt.Sprintf("Index out of bounds (unsorted, %d/%d)", idx, mg.count))
|
panic(fmt.Sprintf("Index out of bounds (unsorted, %d/%d)", idx, mg.count))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (mg *Merger) ToMap() map[int32]Result {
|
||||||
|
ret := make(map[int32]Result, mg.count)
|
||||||
|
for i := 0; i < mg.count; i++ {
|
||||||
|
result := mg.Get(i)
|
||||||
|
ret[result.Index()] = result
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
func (mg *Merger) cacheable() bool {
|
func (mg *Merger) cacheable() bool {
|
||||||
return mg.count < mergerCacheMax
|
return mg.count < mergerCacheMax
|
||||||
}
|
}
|
||||||
|
|
||||||
func (mg *Merger) mergedGet(idx int) *Result {
|
func (mg *Merger) mergedGet(idx int) Result {
|
||||||
for i := len(mg.merged); i <= idx; i++ {
|
for i := len(mg.merged); i <= idx; i++ {
|
||||||
minRank := minRank()
|
minRank := minRank()
|
||||||
minIdx := -1
|
minIdx := -1
|
||||||
@@ -100,13 +165,12 @@ func (mg *Merger) mergedGet(idx int) *Result {
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if cursor >= 0 {
|
if cursor >= 0 {
|
||||||
rank := list[cursor].rank
|
rank := list[cursor]
|
||||||
if minIdx < 0 || compareRanks(rank, minRank, mg.tac) {
|
if minIdx < 0 || compareRanks(rank, minRank, mg.tac) {
|
||||||
minRank = rank
|
minRank = rank
|
||||||
minIdx = listIdx
|
minIdx = listIdx
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
mg.cursors[listIdx] = cursor
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if minIdx >= 0 {
|
if minIdx >= 0 {
|
||||||
|
|||||||
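In the merger.go diff above, a pass-through Merger (empty pattern) records the minimum and maximum item indexes so that FindIndex can locate an item with plain arithmetic instead of scanning, and mirrors the offset when --tac shows the list in reverse. A small sketch of just that index math with simplified types; the bounds check is an addition for the example.

package main

import "fmt"

// findIndex mimics the pass-through branch of Merger.FindIndex: item
// indexes are contiguous, so the on-screen position is itemIndex - minIndex,
// mirrored when the list is displayed bottom-up (--tac).
func findIndex(itemIndex, minIndex int32, count int, tac bool) int {
	index := int(itemIndex - minIndex)
	if index < 0 || index >= count {
		return -1 // not held by this merger
	}
	if tac {
		index = count - index - 1
	}
	return index
}

func main() {
	// A merger holding items 100..104 (count = 5).
	fmt.Println(findIndex(102, 100, 5, false)) // 2
	fmt.Println(findIndex(104, 100, 5, true))  // newest item shown first: 0
	fmt.Println(findIndex(99, 100, 5, false))  // out of range: -1
}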
@@ -15,28 +15,29 @@ func assert(t *testing.T, cond bool, msg ...string) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func randResult() *Result {
|
func randResult() Result {
|
||||||
str := fmt.Sprintf("%d", rand.Uint32())
|
str := fmt.Sprintf("%d", rand.Uint32())
|
||||||
return &Result{
|
chars := util.ToChars([]byte(str))
|
||||||
item: &Item{text: util.RunesToChars([]rune(str))},
|
chars.Index = rand.Int31()
|
||||||
rank: rank{index: rand.Int31()}}
|
return Result{item: &Item{text: chars}}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestEmptyMerger(t *testing.T) {
|
func TestEmptyMerger(t *testing.T) {
|
||||||
assert(t, EmptyMerger.Length() == 0, "Not empty")
|
r := revision{}
|
||||||
assert(t, EmptyMerger.count == 0, "Invalid count")
|
assert(t, EmptyMerger(r).Length() == 0, "Not empty")
|
||||||
assert(t, len(EmptyMerger.lists) == 0, "Invalid lists")
|
assert(t, EmptyMerger(r).count == 0, "Invalid count")
|
||||||
assert(t, len(EmptyMerger.merged) == 0, "Invalid merged list")
|
assert(t, len(EmptyMerger(r).lists) == 0, "Invalid lists")
|
||||||
|
assert(t, len(EmptyMerger(r).merged) == 0, "Invalid merged list")
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildLists(partiallySorted bool) ([][]*Result, []*Result) {
|
func buildLists(partiallySorted bool) ([][]Result, []Result) {
|
||||||
numLists := 4
|
numLists := 4
|
||||||
lists := make([][]*Result, numLists)
|
lists := make([][]Result, numLists)
|
||||||
cnt := 0
|
cnt := 0
|
||||||
for i := 0; i < numLists; i++ {
|
for i := 0; i < numLists; i++ {
|
||||||
numResults := rand.Int() % 20
|
numResults := rand.Int() % 20
|
||||||
cnt += numResults
|
cnt += numResults
|
||||||
lists[i] = make([]*Result, numResults)
|
lists[i] = make([]Result, numResults)
|
||||||
for j := 0; j < numResults; j++ {
|
for j := 0; j < numResults; j++ {
|
||||||
item := randResult()
|
item := randResult()
|
||||||
lists[i][j] = item
|
lists[i][j] = item
|
||||||
@@ -45,7 +46,7 @@ func buildLists(partiallySorted bool) ([][]*Result, []*Result) {
|
|||||||
sort.Sort(ByRelevance(lists[i]))
|
sort.Sort(ByRelevance(lists[i]))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
items := []*Result{}
|
items := []Result{}
|
||||||
for _, list := range lists {
|
for _, list := range lists {
|
||||||
items = append(items, list...)
|
items = append(items, list...)
|
||||||
}
|
}
|
||||||
@@ -57,7 +58,7 @@ func TestMergerUnsorted(t *testing.T) {
|
|||||||
cnt := len(items)
|
cnt := len(items)
|
||||||
|
|
||||||
// Not sorted: same order
|
// Not sorted: same order
|
||||||
mg := NewMerger(nil, lists, false, false)
|
mg := NewMerger(nil, lists, false, false, revision{}, 0, 0)
|
||||||
assert(t, cnt == mg.Length(), "Invalid Length")
|
assert(t, cnt == mg.Length(), "Invalid Length")
|
||||||
for i := 0; i < cnt; i++ {
|
for i := 0; i < cnt; i++ {
|
||||||
assert(t, items[i] == mg.Get(i), "Invalid Get")
|
assert(t, items[i] == mg.Get(i), "Invalid Get")
|
||||||
@@ -69,7 +70,7 @@ func TestMergerSorted(t *testing.T) {
|
|||||||
cnt := len(items)
|
cnt := len(items)
|
||||||
|
|
||||||
// Sorted sorted order
|
// Sorted sorted order
|
||||||
mg := NewMerger(nil, lists, true, false)
|
mg := NewMerger(nil, lists, true, false, revision{}, 0, 0)
|
||||||
assert(t, cnt == mg.Length(), "Invalid Length")
|
assert(t, cnt == mg.Length(), "Invalid Length")
|
||||||
sort.Sort(ByRelevance(items))
|
sort.Sort(ByRelevance(items))
|
||||||
for i := 0; i < cnt; i++ {
|
for i := 0; i < cnt; i++ {
|
||||||
@@ -79,7 +80,7 @@ func TestMergerSorted(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Inverse order
|
// Inverse order
|
||||||
mg2 := NewMerger(nil, lists, true, false)
|
mg2 := NewMerger(nil, lists, true, false, revision{}, 0, 0)
|
||||||
for i := cnt - 1; i >= 0; i-- {
|
for i := cnt - 1; i >= 0; i-- {
|
||||||
if items[i] != mg2.Get(i) {
|
if items[i] != mg2.Get(i) {
|
||||||
t.Error("Not sorted", items[i], mg2.Get(i))
|
t.Error("Not sorted", items[i], mg2.Get(i))
|
||||||
src/options.go (3892 lines changed; diff suppressed because it is too large)
src/options_no_pprof.go (new file, 13 lines)
@@ -0,0 +1,13 @@
+//go:build !pprof
+// +build !pprof
+
+package fzf
+
+import "errors"
+
+func (o *Options) initProfiling() error {
+	if o.CPUProfile != "" || o.MEMProfile != "" || o.BlockProfile != "" || o.MutexProfile != "" {
+		return errors.New("error: profiling not supported: FZF must be built with '-tags=pprof' to enable profiling")
+	}
+	return nil
+}
src/options_pprof.go (new file, 73 lines)
@@ -0,0 +1,73 @@
+//go:build pprof
+// +build pprof
+
+package fzf
+
+import (
+	"fmt"
+	"os"
+	"runtime"
+	"runtime/pprof"
+
+	"github.com/junegunn/fzf/src/util"
+)
+
+func (o *Options) initProfiling() error {
+	if o.CPUProfile != "" {
+		f, err := os.Create(o.CPUProfile)
+		if err != nil {
+			return fmt.Errorf("could not create CPU profile: %w", err)
+		}
+
+		if err := pprof.StartCPUProfile(f); err != nil {
+			return fmt.Errorf("could not start CPU profile: %w", err)
+		}
+
+		util.AtExit(func() {
+			pprof.StopCPUProfile()
+			if err := f.Close(); err != nil {
+				fmt.Fprintln(os.Stderr, "Error: closing cpu profile:", err)
+			}
+		})
+	}
+
+	stopProfile := func(name string, f *os.File) {
+		if err := pprof.Lookup(name).WriteTo(f, 0); err != nil {
+			fmt.Fprintf(os.Stderr, "Error: could not write %s profile: %v\n", name, err)
+		}
+		if err := f.Close(); err != nil {
+			fmt.Fprintf(os.Stderr, "Error: closing %s profile: %v\n", name, err)
+		}
+	}
+
+	if o.MEMProfile != "" {
+		f, err := os.Create(o.MEMProfile)
+		if err != nil {
+			return fmt.Errorf("could not create MEM profile: %w", err)
+		}
+		util.AtExit(func() {
+			runtime.GC()
+			stopProfile("allocs", f)
+		})
+	}
+
+	if o.BlockProfile != "" {
+		runtime.SetBlockProfileRate(1)
+		f, err := os.Create(o.BlockProfile)
+		if err != nil {
+			return fmt.Errorf("could not create BLOCK profile: %w", err)
+		}
+		util.AtExit(func() { stopProfile("block", f) })
+	}
+
+	if o.MutexProfile != "" {
+		runtime.SetMutexProfileFraction(1)
+		f, err := os.Create(o.MutexProfile)
+		if err != nil {
+			return fmt.Errorf("could not create MUTEX profile: %w", err)
+		}
+		util.AtExit(func() { stopProfile("mutex", f) })
+	}
+
+	return nil
+}
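The profiling hooks above register their cleanup through util.AtExit, and Run flushes them with a deferred util.RunAtExitFuncs() (visible near the top of the core.go diff). The following is a simplified from-scratch sketch of such an at-exit registry for illustration; it is not copied from src/util.

package main

import (
	"fmt"
	"sync"
)

var (
	atExitMu    sync.Mutex
	atExitFuncs []func()
)

// AtExit registers a cleanup callback to be run before the program exits.
func AtExit(fn func()) {
	atExitMu.Lock()
	defer atExitMu.Unlock()
	atExitFuncs = append(atExitFuncs, fn)
}

// RunAtExitFuncs runs the registered callbacks once, newest first.
func RunAtExitFuncs() {
	atExitMu.Lock()
	fns := atExitFuncs
	atExitFuncs = nil
	atExitMu.Unlock()
	for i := len(fns) - 1; i >= 0; i-- {
		fns[i]()
	}
}

func main() {
	AtExit(func() { fmt.Println("flush profile") })
	AtExit(func() { fmt.Println("close log file") })
	defer RunAtExitFuncs() // prints "close log file", then "flush profile"
	fmt.Println("doing work")
}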
src/options_pprof_test.go (new file, 89 lines)
@@ -0,0 +1,89 @@
+//go:build pprof
+// +build pprof
+
+package fzf
+
+import (
+	"bytes"
+	"flag"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"testing"
+
+	"github.com/junegunn/fzf/src/util"
+)
+
+// runInitProfileTests is an internal flag used TestInitProfiling
+var runInitProfileTests = flag.Bool("test-init-profile", false, "run init profile tests")
+
+func TestInitProfiling(t *testing.T) {
+	if testing.Short() {
+		t.Skip("short test")
+	}
+
+	// Run this test in a separate process since it interferes with
+	// profiling and modifies the global atexit state. Without this
+	// running `go test -bench . -cpuprofile cpu.out` will fail.
+	if !*runInitProfileTests {
+		t.Parallel()
+
+		// Make sure we are not the child process.
+		if os.Getenv("_FZF_CHILD_PROC") != "" {
+			t.Fatal("already running as child process!")
+		}
+
+		cmd := exec.Command(os.Args[0],
+			"-test.timeout", "30s",
+			"-test.run", "^"+t.Name()+"$",
+			"-test-init-profile",
+		)
+		cmd.Env = append(os.Environ(), "_FZF_CHILD_PROC=1")
+
+		out, err := cmd.CombinedOutput()
+		out = bytes.TrimSpace(out)
+		if err != nil {
+			t.Fatalf("Child test process failed: %v:\n%s", err, out)
+		}
+		// Make sure the test actually ran
+		if bytes.Contains(out, []byte("no tests to run")) {
+			t.Fatalf("Failed to run test %q:\n%s", t.Name(), out)
+		}
+		return
+	}
+
+	// Child process
+
+	tempdir := t.TempDir()
+	t.Cleanup(util.RunAtExitFuncs)
+
+	o := Options{
+		CPUProfile:   filepath.Join(tempdir, "cpu.prof"),
+		MEMProfile:   filepath.Join(tempdir, "mem.prof"),
+		BlockProfile: filepath.Join(tempdir, "block.prof"),
+		MutexProfile: filepath.Join(tempdir, "mutex.prof"),
+	}
+	if err := o.initProfiling(); err != nil {
+		t.Fatal(err)
+	}
+
+	profiles := []string{
+		o.CPUProfile,
+		o.MEMProfile,
+		o.BlockProfile,
+		o.MutexProfile,
+	}
+	for _, name := range profiles {
+		if _, err := os.Stat(name); err != nil {
+			t.Errorf("Failed to create profile %s: %v", filepath.Base(name), err)
+		}
+	}
+
+	util.RunAtExitFuncs()
+
+	for _, name := range profiles {
+		if _, err := os.Stat(name); err != nil {
+			t.Errorf("Failed to write profile %s: %v", filepath.Base(name), err)
+		}
+	}
+}
@@ -2,17 +2,20 @@ package fzf

 import (
 	"fmt"
-	"io/ioutil"
+	"os"
 	"testing"

 	"github.com/junegunn/fzf/src/tui"
-	"github.com/junegunn/fzf/src/util"
 )

 func TestDelimiterRegex(t *testing.T) {
-	// Valid regex
+	// Valid regex, but a single character -> string
 	delim := delimiterRegexp(".")
-	if delim.regex == nil || delim.str != nil {
+	if delim.regex != nil || *delim.str != "." {
+		t.Error(delim)
+	}
+	delim = delimiterRegexp("|")
+	if delim.regex != nil || *delim.str != "|" {
 		t.Error(delim)
 	}
 	// Broken regex -> string
@@ -44,20 +47,20 @@ func TestDelimiterRegex(t *testing.T) {

 func TestDelimiterRegexString(t *testing.T) {
 	delim := delimiterRegexp("*")
-	tokens := Tokenize(util.RunesToChars([]rune("-*--*---**---")), delim)
+	tokens := Tokenize("-*--*---**---", delim)
 	if delim.regex != nil ||
 		tokens[0].text.ToString() != "-*" ||
 		tokens[1].text.ToString() != "--*" ||
 		tokens[2].text.ToString() != "---*" ||
 		tokens[3].text.ToString() != "*" ||
 		tokens[4].text.ToString() != "---" {
-		t.Errorf("%s %s %d", delim, tokens, len(tokens))
+		t.Errorf("%s %v %d", delim, tokens, len(tokens))
 	}
 }

 func TestDelimiterRegexRegex(t *testing.T) {
 	delim := delimiterRegexp("--\\*")
-	tokens := Tokenize(util.RunesToChars([]rune("-*--*---**---")), delim)
+	tokens := Tokenize("-*--*---**---", delim)
 	if delim.str != nil ||
 		tokens[0].text.ToString() != "-*--*" ||
 		tokens[1].text.ToString() != "---*" ||
@@ -66,17 +69,30 @@ func TestDelimiterRegexRegex(t *testing.T) {
 	}
 }

+func TestDelimiterRegexRegexCaret(t *testing.T) {
+	delim := delimiterRegexp(`(^\s*|\s+)`)
+	tokens := Tokenize("foo bar baz", delim)
+	if delim.str != nil ||
+		len(tokens) != 4 ||
+		tokens[0].text.ToString() != "" ||
+		tokens[1].text.ToString() != "foo " ||
+		tokens[2].text.ToString() != "bar " ||
+		tokens[3].text.ToString() != "baz" {
+		t.Errorf("%s %d", tokens, len(tokens))
+	}
+}
+
 func TestSplitNth(t *testing.T) {
 	{
-		ranges := splitNth("..")
+		ranges, _ := splitNth("..")
 		if len(ranges) != 1 ||
 			ranges[0].begin != rangeEllipsis ||
 			ranges[0].end != rangeEllipsis {
-			t.Errorf("%s", ranges)
+			t.Errorf("%v", ranges)
 		}
 	}
 	{
-		ranges := splitNth("..3,1..,2..3,4..-1,-3..-2,..,2,-2,2..-2,1..-1")
+		ranges, _ := splitNth("..3,1..,2..3,4..-1,-3..-2,..,2,-2,2..-2,1..-1")
 		if len(ranges) != 10 ||
 			ranges[0].begin != rangeEllipsis || ranges[0].end != 3 ||
 			ranges[1].begin != rangeEllipsis || ranges[1].end != rangeEllipsis ||
@@ -88,94 +104,99 @@ func TestSplitNth(t *testing.T) {
 			ranges[7].begin != -2 || ranges[7].end != -2 ||
 			ranges[8].begin != 2 || ranges[8].end != -2 ||
 			ranges[9].begin != rangeEllipsis || ranges[9].end != rangeEllipsis {
-			t.Errorf("%s", ranges)
+			t.Errorf("%v", ranges)
 		}
 	}
 }

 func TestIrrelevantNth(t *testing.T) {
+	index := 0
 	{
 		opts := defaultOptions()
 		words := []string{"--nth", "..", "-x"}
-		parseOptions(opts, words)
+		parseOptions(&index, opts, words)
 		postProcessOptions(opts)
 		if len(opts.Nth) != 0 {
-			t.Errorf("nth should be empty: %s", opts.Nth)
+			t.Errorf("nth should be empty: %v", opts.Nth)
 		}
 	}
-	for _, words := range [][]string{[]string{"--nth", "..,3", "+x"}, []string{"--nth", "3,1..", "+x"}, []string{"--nth", "..-1,1", "+x"}} {
+	for _, words := range [][]string{{"--nth", "..,3", "+x"}, {"--nth", "3,1..", "+x"}, {"--nth", "..-1,1", "+x"}} {
 		{
 			opts := defaultOptions()
-			parseOptions(opts, words)
+			parseOptions(&index, opts, words)
 			postProcessOptions(opts)
 			if len(opts.Nth) != 0 {
-				t.Errorf("nth should be empty: %s", opts.Nth)
+				t.Errorf("nth should be empty: %v", opts.Nth)
 			}
 		}
 		{
 			opts := defaultOptions()
 			words = append(words, "-x")
-			parseOptions(opts, words)
+			parseOptions(&index, opts, words)
 			postProcessOptions(opts)
 			if len(opts.Nth) != 2 {
-				t.Errorf("nth should not be empty: %s", opts.Nth)
+				t.Errorf("nth should not be empty: %v", opts.Nth)
 			}
 		}
 	}
 }

 func TestParseKeys(t *testing.T) {
-	pairs := parseKeyChords("ctrl-z,alt-z,f2,@,Alt-a,!,ctrl-G,J,g,ALT-enter,alt-SPACE", "")
-	check := func(i int, s string) {
-		if pairs[i] != s {
-			t.Errorf("%s != %s", pairs[i], s)
+	pairs, _, _ := parseKeyChords("ctrl-z,alt-z,f2,@,Alt-a,!,ctrl-G,J,g,ctrl-alt-a,ALT-enter,alt-SPACE", "")
+	checkEvent := func(e tui.Event, s string) {
+		if pairs[e] != s {
+			t.Errorf("%s != %s", pairs[e], s)
 		}
 	}
-	if len(pairs) != 11 {
-		t.Error(11)
+	check := func(et tui.EventType, s string) {
+		checkEvent(et.AsEvent(), s)
+	}
+	if len(pairs) != 12 {
+		t.Error(12)
 	}
 	check(tui.CtrlZ, "ctrl-z")
-	check(tui.AltZ, "alt-z")
 	check(tui.F2, "f2")
-	check(tui.AltZ+'@', "@")
-	check(tui.AltA, "Alt-a")
-	check(tui.AltZ+'!', "!")
-	check(tui.CtrlA+'g'-'a', "ctrl-G")
-	check(tui.AltZ+'J', "J")
-	check(tui.AltZ+'g', "g")
-	check(tui.AltEnter, "ALT-enter")
-	check(tui.AltSpace, "alt-SPACE")
+	check(tui.CtrlG, "ctrl-G")
+	checkEvent(tui.AltKey('z'), "alt-z")
+	checkEvent(tui.Key('@'), "@")
+	checkEvent(tui.AltKey('a'), "Alt-a")
+	checkEvent(tui.Key('!'), "!")
+	checkEvent(tui.Key('J'), "J")
+	checkEvent(tui.Key('g'), "g")
+	checkEvent(tui.CtrlAltKey('a'), "ctrl-alt-a")
+	checkEvent(tui.CtrlAltKey('m'), "ALT-enter")
+	checkEvent(tui.AltKey(' '), "alt-SPACE")

 	// Synonyms
-	pairs = parseKeyChords("enter,Return,space,tab,btab,esc,up,down,left,right", "")
+	pairs, _, _ = parseKeyChords("enter,Return,space,tab,btab,esc,up,down,left,right", "")
 	if len(pairs) != 9 {
 		t.Error(9)
 	}
-	check(tui.CtrlM, "Return")
-	check(tui.AltZ+' ', "space")
+	check(tui.Enter, "Return")
+	checkEvent(tui.Key(' '), "space")
 	check(tui.Tab, "tab")
-	check(tui.BTab, "btab")
-	check(tui.ESC, "esc")
+	check(tui.ShiftTab, "btab")
+	check(tui.Esc, "esc")
 	check(tui.Up, "up")
 	check(tui.Down, "down")
 	check(tui.Left, "left")
 	check(tui.Right, "right")

-	pairs = parseKeyChords("Tab,Ctrl-I,PgUp,page-up,pgdn,Page-Down,Home,End,Alt-BS,Alt-BSpace,shift-left,shift-right,btab,shift-tab,return,Enter,bspace", "")
+	pairs, _, _ = parseKeyChords("Tab,Ctrl-I,PgUp,page-up,pgdn,Page-Down,Home,End,Alt-BS,Alt-BSpace,shift-left,shift-right,btab,shift-tab,return,Enter,bspace", "")
 	if len(pairs) != 11 {
 		t.Error(11)
 	}
 	check(tui.Tab, "Ctrl-I")
-	check(tui.PgUp, "page-up")
-	check(tui.PgDn, "Page-Down")
+	check(tui.PageUp, "page-up")
+	check(tui.PageDown, "Page-Down")
 	check(tui.Home, "Home")
 	check(tui.End, "End")
-	check(tui.AltBS, "Alt-BSpace")
-	check(tui.SLeft, "shift-left")
-	check(tui.SRight, "shift-right")
-	check(tui.BTab, "shift-tab")
-	check(tui.CtrlM, "Enter")
-	check(tui.BSpace, "bspace")
+	check(tui.AltBackspace, "Alt-BSpace")
+	check(tui.ShiftLeft, "shift-left")
+	check(tui.ShiftRight, "shift-right")
+	check(tui.ShiftTab, "shift-tab")
+	check(tui.Enter, "Enter")
+	check(tui.Backspace, "bspace")
 }

 func TestParseKeysWithComma(t *testing.T) {
@@ -184,97 +205,107 @@ func TestParseKeysWithComma(t *testing.T) {
 			t.Errorf("%d != %d", a, b)
 		}
 	}
-	check := func(pairs map[int]string, i int, s string) {
-		if pairs[i] != s {
-			t.Errorf("%s != %s", pairs[i], s)
+	check := func(pairs map[tui.Event]string, e tui.Event, s string) {
+		if pairs[e] != s {
+			t.Errorf("%s != %s", pairs[e], s)
 		}
 	}

-	pairs := parseKeyChords(",", "")
+	pairs, _, _ := parseKeyChords(",", "")
 	checkN(len(pairs), 1)
-	check(pairs, tui.AltZ+',', ",")
+	check(pairs, tui.Key(','), ",")

-	pairs = parseKeyChords(",,a,b", "")
+	pairs, _, _ = parseKeyChords(",,a,b", "")
 	checkN(len(pairs), 3)
-	check(pairs, tui.AltZ+'a', "a")
-	check(pairs, tui.AltZ+'b', "b")
-	check(pairs, tui.AltZ+',', ",")
+	check(pairs, tui.Key('a'), "a")
+	check(pairs, tui.Key('b'), "b")
+	check(pairs, tui.Key(','), ",")

-	pairs = parseKeyChords("a,b,,", "")
+	pairs, _, _ = parseKeyChords("a,b,,", "")
 	checkN(len(pairs), 3)
-	check(pairs, tui.AltZ+'a', "a")
-	check(pairs, tui.AltZ+'b', "b")
-	check(pairs, tui.AltZ+',', ",")
+	check(pairs, tui.Key('a'), "a")
+	check(pairs, tui.Key('b'), "b")
+	check(pairs, tui.Key(','), ",")

-	pairs = parseKeyChords("a,,,b", "")
+	pairs, _, _ = parseKeyChords("a,,,b", "")
 	checkN(len(pairs), 3)
-	check(pairs, tui.AltZ+'a', "a")
-	check(pairs, tui.AltZ+'b', "b")
-	check(pairs, tui.AltZ+',', ",")
+	check(pairs, tui.Key('a'), "a")
+	check(pairs, tui.Key('b'), "b")
+	check(pairs, tui.Key(','), ",")

-	pairs = parseKeyChords("a,,,b,c", "")
+	pairs, _, _ = parseKeyChords("a,,,b,c", "")
 	checkN(len(pairs), 4)
-	check(pairs, tui.AltZ+'a', "a")
-	check(pairs, tui.AltZ+'b', "b")
-	check(pairs, tui.AltZ+'c', "c")
-	check(pairs, tui.AltZ+',', ",")
+	check(pairs, tui.Key('a'), "a")
+	check(pairs, tui.Key('b'), "b")
+	check(pairs, tui.Key('c'), "c")
+	check(pairs, tui.Key(','), ",")

-	pairs = parseKeyChords(",,,", "")
+	pairs, _, _ = parseKeyChords(",,,", "")
 	checkN(len(pairs), 1)
-	check(pairs, tui.AltZ+',', ",")
+	check(pairs, tui.Key(','), ",")
+
+	pairs, _, _ = parseKeyChords(",ALT-,,", "")
+	checkN(len(pairs), 1)
+	check(pairs, tui.AltKey(','), "ALT-,")
 }

 func TestBind(t *testing.T) {
 	keymap := defaultKeymap()
-	check := func(keyName int, arg1 string, types ...actionType) {
-		if len(keymap[keyName]) != len(types) {
-			t.Errorf("invalid number of actions (%d != %d)", len(types), len(keymap[keyName]))
+	check := func(event tui.Event, arg1 string, types ...actionType) {
+		if len(keymap[event]) != len(types) {
+			t.Errorf("invalid number of actions for %v (%d != %d)",
+				event, len(types), len(keymap[event]))
 			return
 		}
-		for idx, action := range keymap[keyName] {
+		for idx, action := range keymap[event] {
 			if types[idx] != action.t {
 				t.Errorf("invalid action type (%d != %d)", types[idx], action.t)
 			}
 		}
-		if len(arg1) > 0 && keymap[keyName][0].a != arg1 {
-			t.Errorf("invalid action argument: (%s != %s)", arg1, keymap[keyName][0].a)
+		if len(arg1) > 0 && keymap[event][0].a != arg1 {
+			t.Errorf("invalid action argument: (%s != %s)", arg1, keymap[event][0].a)
 		}
 	}
-	check(tui.CtrlA, "", actBeginningOfLine)
+	check(tui.CtrlA.AsEvent(), "", actBeginningOfLine)
 	parseKeymap(keymap,
 		"ctrl-a:kill-line,ctrl-b:toggle-sort+up+down,c:page-up,alt-z:page-down,"+
-			"f1:execute(ls {})+abort,f2:execute/echo {}, {}, {}/,f3:execute[echo '({})'],f4:execute;less {};,"+
+			"f1:execute(ls {+})+abort+execute(echo \n{+})+select-all,f2:execute/echo {}, {}, {}/,f3:execute[echo '({})'],f4:execute;less {};,"+
			"alt-a:execute-Multi@echo (,),[,],/,:,;,%,{}@,alt-b:execute;echo (,),[,],/,:,@,%,{};,"+
			"x:Execute(foo+bar),X:execute/bar+baz/"+
+			",f1:+first,f1:+top"+
			",,:abort,::accept,+:execute:++\nfoobar,Y:execute(baz)+up")
-	check(tui.CtrlA, "", actKillLine)
-	check(tui.CtrlB, "", actToggleSort, actUp, actDown)
-	check(tui.AltZ+'c', "", actPageUp)
-	check(tui.AltZ+',', "", actAbort)
-	check(tui.AltZ+':', "", actAccept)
-	check(tui.AltZ, "", actPageDown)
-	check(tui.F1, "ls {}", actExecute, actAbort)
-	check(tui.F2, "echo {}, {}, {}", actExecute)
-	check(tui.F3, "echo '({})'", actExecute)
-	check(tui.F4, "less {}", actExecute)
-	check(tui.AltZ+'x', "foo+bar", actExecute)
-	check(tui.AltZ+'X', "bar+baz", actExecute)
-	check(tui.AltA, "echo (,),[,],/,:,;,%,{}", actExecuteMulti)
-	check(tui.AltB, "echo (,),[,],/,:,@,%,{}", actExecute)
-	check(tui.AltZ+'+', "++\nfoobar,Y:execute(baz)+up", actExecute)
+	check(tui.CtrlA.AsEvent(), "", actKillLine)
+	check(tui.CtrlB.AsEvent(), "", actToggleSort, actUp, actDown)
+	check(tui.Key('c'), "", actPageUp)
+	check(tui.Key(','), "", actAbort)
+	check(tui.Key(':'), "", actAccept)
+	check(tui.AltKey('z'), "", actPageDown)
+	check(tui.F1.AsEvent(), "ls {+}", actExecute, actAbort, actExecute, actSelectAll, actFirst, actFirst)
+	check(tui.F2.AsEvent(), "echo {}, {}, {}", actExecute)
+	check(tui.F3.AsEvent(), "echo '({})'", actExecute)
+	check(tui.F4.AsEvent(), "less {}", actExecute)
+	check(tui.Key('x'), "foo+bar", actExecute)
+	check(tui.Key('X'), "bar+baz", actExecute)
+	check(tui.AltKey('a'), "echo (,),[,],/,:,;,%,{}", actExecuteMulti)
+	check(tui.AltKey('b'), "echo (,),[,],/,:,@,%,{}", actExecute)
+	check(tui.Key('+'), "++\nfoobar,Y:execute(baz)+up", actExecute)

 	for idx, char := range []rune{'~', '!', '@', '#', '$', '%', '^', '&', '*', '|', ';', '/'} {
 		parseKeymap(keymap, fmt.Sprintf("%d:execute%cfoobar%c", idx%10, char, char))
-		check(tui.AltZ+int([]rune(fmt.Sprintf("%d", idx%10))[0]), "foobar", actExecute)
+		check(tui.Key([]rune(fmt.Sprintf("%d", idx%10))[0]), "foobar", actExecute)
 	}

 	parseKeymap(keymap, "f1:abort")
-	check(tui.F1, "", actAbort)
+	check(tui.F1.AsEvent(), "", actAbort)
 }

 func TestColorSpec(t *testing.T) {
+	var base *tui.ColorTheme
 	theme := tui.Dark256
-	dark := parseTheme(theme, "dark")
+	base, dark, _ := parseTheme(theme, "dark")
+	if *dark != *base {
+		t.Errorf("incorrect base theme returned")
+	}
 	if *dark != *theme {
 		t.Errorf("colors should be equivalent")
 	}
@@ -282,7 +313,10 @@ func TestColorSpec(t *testing.T) {
 		t.Errorf("point should not be equivalent")
 	}

-	light := parseTheme(theme, "dark,light")
+	base, light, _ := parseTheme(theme, "dark,light")
+	if *light != *base {
+		t.Errorf("incorrect base theme returned")
+	}
 	if *light == *theme {
 		t.Errorf("should not be equivalent")
 	}
@@ -293,8 +327,8 @@ func TestColorSpec(t *testing.T) {
 		t.Errorf("point should not be equivalent")
 	}

-	customized := parseTheme(theme, "fg:231,bg:232")
-	if customized.Fg != 231 || customized.Bg != 232 {
+	_, customized, _ := parseTheme(theme, "fg:231,bg:232")
+	if customized.Fg.Color != 231 || customized.Bg.Color != 232 {
 		t.Errorf("color not customized")
 	}
 	if *tui.Dark256 == *customized {
@@ -306,55 +340,46 @@ func TestColorSpec(t *testing.T) {
 		t.Errorf("colors should now be equivalent: %v, %v", tui.Dark256, customized)
 	}

-	customized = parseTheme(theme, "fg:231,dark,bg:232")
+	_, customized, _ = parseTheme(theme, "fg:231,dark,bg:232")
 	if customized.Fg != tui.Dark256.Fg || customized.Bg == tui.Dark256.Bg {
 		t.Errorf("color not customized")
 	}
 }

-func TestParseNilTheme(t *testing.T) {
-	var theme *tui.ColorTheme
-	newTheme := parseTheme(theme, "prompt:12")
-	if newTheme != nil {
-		t.Errorf("color is disabled. keep it that way.")
-	}
-	newTheme = parseTheme(theme, "prompt:12,dark,prompt:13")
-	if newTheme.Prompt != 13 {
-		t.Errorf("color should now be enabled and customized")
-	}
-}
-
 func TestDefaultCtrlNP(t *testing.T) {
-	check := func(words []string, key int, expected actionType) {
+	index := 0
+	check := func(words []string, et tui.EventType, expected actionType) {
+		e := et.AsEvent()
 		opts := defaultOptions()
-		parseOptions(opts, words)
+		parseOptions(&index, opts, words)
 		postProcessOptions(opts)
-		if opts.Keymap[key][0].t != expected {
+		if opts.Keymap[e][0].t != expected {
 			t.Error()
 		}
 	}
-	check([]string{}, tui.CtrlN, actDown)
-	check([]string{}, tui.CtrlP, actUp)
+	check([]string{}, tui.CtrlN, actDownMatch)
+	check([]string{}, tui.CtrlP, actUpMatch)

 	check([]string{"--bind=ctrl-n:accept"}, tui.CtrlN, actAccept)
 	check([]string{"--bind=ctrl-p:accept"}, tui.CtrlP, actAccept)

-	f, _ := ioutil.TempFile("", "fzf-history")
+	f, _ := os.CreateTemp("", "fzf-history")
 	f.Close()
 	hist := "--history=" + f.Name()
 	check([]string{hist}, tui.CtrlN, actNextHistory)
-	check([]string{hist}, tui.CtrlP, actPreviousHistory)
+	check([]string{hist}, tui.CtrlP, actPrevHistory)

 	check([]string{hist, "--bind=ctrl-n:accept"}, tui.CtrlN, actAccept)
-	check([]string{hist, "--bind=ctrl-n:accept"}, tui.CtrlP, actPreviousHistory)
+	check([]string{hist, "--bind=ctrl-n:accept"}, tui.CtrlP, actPrevHistory)

 	check([]string{hist, "--bind=ctrl-p:accept"}, tui.CtrlN, actNextHistory)
 	check([]string{hist, "--bind=ctrl-p:accept"}, tui.CtrlP, actAccept)
 }

 func optsFor(words ...string) *Options {
+	index := 0
 	opts := defaultOptions()
-	parseOptions(opts, words)
+	parseOptions(&index, opts, words)
 	postProcessOptions(opts)
 	return opts
 }
@@ -386,23 +411,26 @@ func TestPreviewOpts(t *testing.T) {
 		opts.Preview.size.size == 50) {
 		t.Error()
 	}
-	opts = optsFor("--preview", "cat {}", "--preview-window=left:15:hidden:wrap")
+	opts = optsFor("--preview", "cat {}", "--preview-window=left:15,hidden,wrap:+{1}-/2")
 	if !(opts.Preview.command == "cat {}" &&
 		opts.Preview.hidden == true &&
 		opts.Preview.wrap == true &&
 		opts.Preview.position == posLeft &&
+		opts.Preview.scroll == "+{1}-/2" &&
 		opts.Preview.size.percent == false &&
-		opts.Preview.size.size == 15+2+2) {
+		opts.Preview.size.size == 15) {
 		t.Error(opts.Preview)
 	}
-	opts = optsFor("--preview-window=up:15:wrap:hidden", "--preview-window=down")
+	opts = optsFor("--preview-window=up,15,wrap,hidden,+{1}+3-1-2/2", "--preview-window=down", "--preview-window=cycle")
 	if !(opts.Preview.command == "" &&
-		opts.Preview.hidden == false &&
-		opts.Preview.wrap == false &&
+		opts.Preview.hidden == true &&
+		opts.Preview.wrap == true &&
+		opts.Preview.cycle == true &&
 		opts.Preview.position == posDown &&
-		opts.Preview.size.percent == true &&
-		opts.Preview.size.size == 50) {
-		t.Error(opts.Preview)
+		opts.Preview.scroll == "+{1}+3-1-2/2" &&
+		opts.Preview.size.percent == false &&
+		opts.Preview.size.size == 15) {
+		t.Error(opts.Preview.size.size)
 	}
 	opts = optsFor("--preview-window=up:15:wrap:hidden")
 	if !(opts.Preview.command == "" &&
@@ -410,7 +438,76 @@ func TestPreviewOpts(t *testing.T) {
 		opts.Preview.wrap == true &&
 		opts.Preview.position == posUp &&
 		opts.Preview.size.percent == false &&
-		opts.Preview.size.size == 15+2) {
+		opts.Preview.size.size == 15) {
+		t.Error(opts.Preview)
+	}
+	opts = optsFor("--preview=foo", "--preview-window=up", "--preview-window=default:70%")
+	if !(opts.Preview.command == "foo" &&
+		opts.Preview.position == posRight &&
+		opts.Preview.size.percent == true &&
+		opts.Preview.size.size == 70) {
 		t.Error(opts.Preview)
 	}
 }
+
+func TestAdditiveExpect(t *testing.T) {
+	opts := optsFor("--expect=a", "--expect", "b", "--expect=c")
+	if len(opts.Expect) != 3 {
+		t.Error(opts.Expect)
+	}
+}
+
+func TestValidateSign(t *testing.T) {
+	testCases := []struct {
+		inputSign string
+		isValid   bool
+	}{
+		{"> ", true},
+		{"아", true},
+		{"😀", true},
+		{">>>", false},
+	}
+
+	for _, testCase := range testCases {
+		err := validateSign(testCase.inputSign, "", 2)
+		if testCase.isValid && err != nil {
+			t.Errorf("Input sign `%s` caused error", testCase.inputSign)
+		}
+
+		if !testCase.isValid && err == nil {
+			t.Errorf("Input sign `%s` did not cause error", testCase.inputSign)
+		}
+	}
+}
+
+func TestParseSingleActionList(t *testing.T) {
+	actions, _ := parseSingleActionList("Execute@foo+bar,baz@+up+up+reload:down+down")
+	if len(actions) != 4 {
+		t.Errorf("Invalid number of actions parsed:%d", len(actions))
+	}
+	if actions[0].t != actExecute || actions[0].a != "foo+bar,baz" {
+		t.Errorf("Invalid action parsed: %v", actions[0])
+	}
+	if actions[1].t != actUp || actions[2].t != actUp {
+		t.Errorf("Invalid action parsed: %v / %v", actions[1], actions[2])
+	}
+	if actions[3].t != actReload || actions[3].a != "down+down" {
+		t.Errorf("Invalid action parsed: %v", actions[3])
+	}
+}
+
+func TestParseSingleActionListError(t *testing.T) {
+	_, err := parseSingleActionList("change-query(foobar)baz")
+	if err == nil {
+		t.Errorf("Failed to detect error")
+	}
+}
+
+func TestMaskActionContents(t *testing.T) {
+	original := ":execute((f)(o)(o)(b)(a)(r))+change-query@qu@ry@+up,x:reload:hello:world"
+	expected := ":execute +change-query +up,x:reload "
+	masked := maskActionContents(original)
+	if masked != expected {
+		t.Errorf("Not masked: %s", masked)
+	}
+}
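A recurring change in the options_test.go diff above is the move from plain integer key codes (tui.AltZ+'a', tui.CtrlM, ...) to a typed tui.Event used as the keymap key, built through constructors such as tui.Key, tui.AltKey and tui.CtrlAltKey. The sketch below only illustrates the general shape of that refactor; Event, EventType, Key and AltKey here are simplified stand-ins, not fzf's actual tui definitions.

package main

import "fmt"

// EventType distinguishes the kind of key event.
type EventType int

const (
	Rune EventType = iota
	AltRune
)

// Event is a composite, comparable key: an event type plus the rune it carries.
type Event struct {
	Type EventType
	Char rune
}

// Constructors replace arithmetic on magic integer offsets.
func Key(r rune) Event    { return Event{Rune, r} }
func AltKey(r rune) Event { return Event{AltRune, r} }

func main() {
	// Events are valid map keys, so bindings can be looked up directly.
	bindings := map[Event]string{
		Key('a'):    "a",
		AltKey('a'): "alt-a",
	}
	fmt.Println(bindings[AltKey('a')]) // prints "alt-a"
}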
265  src/pattern.go
@@ -1,6 +1,7 @@
 package fzf

 import (
+	"fmt"
 	"regexp"
 	"strings"

@@ -10,18 +11,19 @@ import (

 // fuzzy
 // 'exact
-// ^exact-prefix
-// exact-suffix$
-// !not-fuzzy
-// !'not-exact
-// !^not-exact-prefix
-// !not-exact-suffix$
+// ^prefix-exact
+// suffix-exact$
+// !inverse-exact
+// !'inverse-fuzzy
+// !^inverse-prefix-exact
+// !inverse-suffix-exact$

 type termType int

 const (
 	termFuzzy termType = iota
 	termExact
+	termExactBoundary
 	termPrefix
 	termSuffix
 	termEqual
@@ -32,7 +34,12 @@ type term struct {
 	inv           bool
 	text          []rune
 	caseSensitive bool
-	origText      []rune
+	normalize     bool
+}
+
+// String returns the string representation of a term.
+func (t term) String() string {
+	return fmt.Sprintf("term{typ: %d, inv: %v, text: []rune(%q), caseSensitive: %v}", t.typ, t.inv, string(t.text), t.caseSensitive)
 }

 type termSet []term
@@ -45,70 +52,76 @@ type Pattern struct {
 	caseSensitive bool
 	normalize     bool
 	forward       bool
+	withPos       bool
 	text          []rune
 	termSets      []termSet
+	sortable      bool
 	cacheable     bool
+	cacheKey      string
 	delimiter     Delimiter
 	nth           []Range
+	revision      revision
 	procFun       map[termType]algo.Algo
+	cache         *ChunkCache
+	denylist      map[int32]struct{}
 }

-var (
-	_patternCache map[string]*Pattern
-	_splitRegex   *regexp.Regexp
-	_cache        ChunkCache
-)
+var _splitRegex *regexp.Regexp

 func init() {
-	_splitRegex = regexp.MustCompile("\\s+")
-	clearPatternCache()
-	clearChunkCache()
-}
-
-func clearPatternCache() {
-	// We can uniquely identify the pattern for a given string since
-	// search mode and caseMode do not change while the program is running
-	_patternCache = make(map[string]*Pattern)
-}
-
-func clearChunkCache() {
-	_cache = NewChunkCache()
+	_splitRegex = regexp.MustCompile(" +")
 }

 // BuildPattern builds Pattern object from the given arguments
-func BuildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case, normalize bool, forward bool,
-	cacheable bool, nth []Range, delimiter Delimiter, runes []rune) *Pattern {
+func BuildPattern(cache *ChunkCache, patternCache map[string]*Pattern, fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case, normalize bool, forward bool,
+	withPos bool, cacheable bool, nth []Range, delimiter Delimiter, revision revision, runes []rune, denylist map[int32]struct{}) *Pattern {

 	var asString string
 	if extended {
-		asString = strings.Trim(string(runes), " ")
+		asString = strings.TrimLeft(string(runes), " ")
+		for strings.HasSuffix(asString, " ") && !strings.HasSuffix(asString, "\\ ") {
+			asString = asString[:len(asString)-1]
+		}
 	} else {
 		asString = string(runes)
 	}

-	cached, found := _patternCache[asString]
+	// We can uniquely identify the pattern for a given string since
+	// search mode and caseMode do not change while the program is running
+	cached, found := patternCache[asString]
 	if found {
 		return cached
 	}

 	caseSensitive := true
+	sortable := true
 	termSets := []termSet{}

 	if extended {
 		termSets = parseTerms(fuzzy, caseMode, normalize, asString)
+		// We should not sort the result if there are only inverse search terms
+		sortable = false
 	Loop:
 		for _, termSet := range termSets {
 			for idx, term := range termSet {
+				if !term.inv {
+					sortable = true
+				}
 				// If the query contains inverse search terms or OR operators,
 				// we cannot cache the search scope
-				if !cacheable || idx > 0 || term.inv {
+				if !cacheable || idx > 0 || term.inv || fuzzy && term.typ != termFuzzy || !fuzzy && term.typ != termExact {
 					cacheable = false
-					break Loop
+					if sortable {
+						// Can't break until we see at least one non-inverse term
+						break Loop
+					}
 				}
 			}
 		}
 	} else {
 		lowerString := strings.ToLower(asString)
+		normalize = normalize &&
+			lowerString == string(algo.NormalizeRunes([]rune(lowerString)))
 		caseSensitive = caseMode == CaseRespect ||
 			caseMode == CaseSmart && lowerString != asString
 		if !caseSensitive {
@@ -123,45 +136,57 @@ func BuildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case,
 		caseSensitive: caseSensitive,
 		normalize:     normalize,
 		forward:       forward,
+		withPos:       withPos,
 		text:          []rune(asString),
 		termSets:      termSets,
+		sortable:      sortable,
 		cacheable:     cacheable,
 		nth:           nth,
+		revision:      revision,
 		delimiter:     delimiter,
+		cache:         cache,
+		denylist:      denylist,
 		procFun:       make(map[termType]algo.Algo)}

+	ptr.cacheKey = ptr.buildCacheKey()
 	ptr.procFun[termFuzzy] = fuzzyAlgo
 	ptr.procFun[termEqual] = algo.EqualMatch
 	ptr.procFun[termExact] = algo.ExactMatchNaive
+	ptr.procFun[termExactBoundary] = algo.ExactMatchBoundary
 	ptr.procFun[termPrefix] = algo.PrefixMatch
 	ptr.procFun[termSuffix] = algo.SuffixMatch

-	_patternCache[asString] = ptr
+	patternCache[asString] = ptr
 	return ptr
 }

 func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet {
+	str = strings.ReplaceAll(str, "\\ ", "\t")
 	tokens := _splitRegex.Split(str, -1)
 	sets := []termSet{}
 	set := termSet{}
 	switchSet := false
+	afterBar := false
 	for _, token := range tokens {
-		typ, inv, text := termFuzzy, false, token
+		typ, inv, text := termFuzzy, false, strings.ReplaceAll(token, "\t", " ")
 		lowerText := strings.ToLower(text)
 		caseSensitive := caseMode == CaseRespect ||
 			caseMode == CaseSmart && text != lowerText
+		normalizeTerm := normalize &&
+			lowerText == string(algo.NormalizeRunes([]rune(lowerText)))
 		if !caseSensitive {
 			text = lowerText
 		}
-		origText := []rune(text)
 		if !fuzzy {
 			typ = termExact
 		}

-		if text == "|" {
+		if len(set) > 0 && !afterBar && text == "|" {
 			switchSet = false
+			afterBar = true
 			continue
 		}
+		afterBar = false

 		if strings.HasPrefix(text, "!") {
 			inv = true
@@ -169,26 +194,29 @@ func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet
 			text = text[1:]
 		}

-		if strings.HasPrefix(text, "'") {
+		if text != "$" && strings.HasSuffix(text, "$") {
+			typ = termSuffix
+			text = text[:len(text)-1]
+		}
+
+		if len(text) > 2 && strings.HasPrefix(text, "'") && strings.HasSuffix(text, "'") {
+			typ = termExactBoundary
+			text = text[1 : len(text)-1]
+		} else if strings.HasPrefix(text, "'") {
 			// Flip exactness
 			if fuzzy && !inv {
 				typ = termExact
-				text = text[1:]
 			} else {
 				typ = termFuzzy
-				text = text[1:]
 			}
+			text = text[1:]
 		} else if strings.HasPrefix(text, "^") {
-			if strings.HasSuffix(text, "$") {
+			if typ == termSuffix {
 				typ = termEqual
-				text = text[1 : len(text)-1]
 			} else {
 				typ = termPrefix
-				text = text[1:]
 			}
-		} else if strings.HasSuffix(text, "$") {
-			typ = termSuffix
-			text = text[:len(text)-1]
+			text = text[1:]
 		}

 		if len(text) > 0 {
@@ -197,7 +225,7 @@ func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet
 				set = termSet{}
 			}
 			textRunes := []rune(text)
-			if normalize {
+			if normalizeTerm {
 				textRunes = algo.NormalizeRunes(textRunes)
 			}
 			set = append(set, term{
@@ -205,7 +233,7 @@ func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet
 				inv:           inv,
 				text:          textRunes,
 				caseSensitive: caseSensitive,
-				origText:      origText})
+				normalize:     normalizeTerm})
 			switchSet = true
 		}
 	}
@@ -217,6 +245,9 @@ func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet

 // IsEmpty returns true if the pattern is effectively empty
 func (p *Pattern) IsEmpty() bool {
+	if len(p.denylist) > 0 {
+		return false
+	}
 	if !p.extended {
 		return len(p.text) == 0
 	}
@@ -228,68 +259,82 @@ func (p *Pattern) AsString() string {
 	return string(p.text)
 }

-// CacheKey is used to build string to be used as the key of result cache
-func (p *Pattern) CacheKey() string {
+func (p *Pattern) buildCacheKey() string {
 	if !p.extended {
 		return p.AsString()
 	}
 	cacheableTerms := []string{}
 	for _, termSet := range p.termSets {
 		if len(termSet) == 1 && !termSet[0].inv && (p.fuzzy || termSet[0].typ == termExact) {
-			cacheableTerms = append(cacheableTerms, string(termSet[0].origText))
+			cacheableTerms = append(cacheableTerms, string(termSet[0].text))
 		}
 	}
-	return strings.Join(cacheableTerms, " ")
+	return strings.Join(cacheableTerms, "\t")
+}
+
+// CacheKey is used to build string to be used as the key of result cache
+func (p *Pattern) CacheKey() string {
+	return p.cacheKey
 }

 // Match returns the list of matches Items in the given Chunk
-func (p *Pattern) Match(chunk *Chunk, slab *util.Slab) []*Result {
+func (p *Pattern) Match(chunk *Chunk, slab *util.Slab) []Result {
 	// ChunkCache: Exact match
 	cacheKey := p.CacheKey()
 	if p.cacheable {
-		if cached, found := _cache.Find(chunk, cacheKey); found {
+		if cached := p.cache.Lookup(chunk, cacheKey); cached != nil {
 			return cached
 		}
 	}

 	// Prefix/suffix cache
-	var space []*Result
-Loop:
-	for idx := 1; idx < len(cacheKey); idx++ {
-		// [---------| ] | [ |---------]
-		// [--------| ] | [ |--------]
-		// [-------| ] | [ |-------]
-		prefix := cacheKey[:len(cacheKey)-idx]
-		suffix := cacheKey[idx:]
-		for _, substr := range [2]*string{&prefix, &suffix} {
-			if cached, found := _cache.Find(chunk, *substr); found {
-				space = cached
-				break Loop
-			}
-		}
-	}
+	space := p.cache.Search(chunk, cacheKey)

 	matches := p.matchChunk(chunk, space, slab)

 	if p.cacheable {
-		_cache.Add(chunk, cacheKey, matches)
+		p.cache.Add(chunk, cacheKey, matches)
 	}
 	return matches
 }

-func (p *Pattern) matchChunk(chunk *Chunk, space []*Result, slab *util.Slab) []*Result {
-	matches := []*Result{}
+func (p *Pattern) matchChunk(chunk *Chunk, space []Result, slab *util.Slab) []Result {
+	matches := []Result{}

+	if len(p.denylist) == 0 {
+		// Huge code duplication for minimizing unnecessary map lookups
+		if space == nil {
+			for idx := 0; idx < chunk.count; idx++ {
+				if match, _, _ := p.MatchItem(&chunk.items[idx], p.withPos, slab); match != nil {
+					matches = append(matches, *match)
+				}
+			}
+		} else {
+			for _, result := range space {
+				if match, _, _ := p.MatchItem(result.item, p.withPos, slab); match != nil {
+					matches = append(matches, *match)
+				}
+			}
+		}
+		return matches
+	}
+
 	if space == nil {
-		for _, item := range *chunk {
-			if match, _, _ := p.MatchItem(item, false, slab); match != nil {
-				matches = append(matches, match)
+		for idx := 0; idx < chunk.count; idx++ {
+			if _, prs := p.denylist[chunk.items[idx].Index()]; prs {
+				continue
+			}
+			if match, _, _ := p.MatchItem(&chunk.items[idx], p.withPos, slab); match != nil {
+				matches = append(matches, *match)
 			}
 		}
 	} else {
 		for _, result := range space {
-			if match, _, _ := p.MatchItem(result.item, false, slab); match != nil {
-				matches = append(matches, match)
+			if _, prs := p.denylist[result.item.Index()]; prs {
+				continue
+			}
+			if match, _, _ := p.MatchItem(result.item, p.withPos, slab); match != nil {
+				matches = append(matches, *match)
 			}
 		}
 	}
@@ -299,32 +344,43 @@ func (p *Pattern) matchChunk(chunk *Chunk, space []*Result, slab *util.Slab) []*
 // MatchItem returns true if the Item is a match
 func (p *Pattern) MatchItem(item *Item, withPos bool, slab *util.Slab) (*Result, []Offset, *[]int) {
 	if p.extended {
-		if offsets, bonus, trimLen, pos := p.extendedMatch(item, withPos, slab); len(offsets) == len(p.termSets) {
-			return buildResult(item, offsets, bonus, trimLen), offsets, pos
+		if offsets, bonus, pos := p.extendedMatch(item, withPos, slab); len(offsets) == len(p.termSets) {
+			result := buildResult(item, offsets, bonus)
+			return &result, offsets, pos
 		}
 		return nil, nil, nil
 	}
-	offset, bonus, trimLen, pos := p.basicMatch(item, withPos, slab)
+	offset, bonus, pos := p.basicMatch(item, withPos, slab)
 	if sidx := offset[0]; sidx >= 0 {
 		offsets := []Offset{offset}
-		return buildResult(item, offsets, bonus, trimLen), offsets, pos
+		result := buildResult(item, offsets, bonus)
+		return &result, offsets, pos
 	}
 	return nil, nil, nil
 }

-func (p *Pattern) basicMatch(item *Item, withPos bool, slab *util.Slab) (Offset, int, int, *[]int) {
-	input := p.prepareInput(item)
+func (p *Pattern) basicMatch(item *Item, withPos bool, slab *util.Slab) (Offset, int, *[]int) {
+	var input []Token
+	if len(p.nth) == 0 {
+		input = []Token{{text: &item.text, prefixLength: 0}}
+	} else {
+		input = p.transformInput(item)
+	}
 	if p.fuzzy {
 		return p.iter(p.fuzzyAlgo, input, p.caseSensitive, p.normalize, p.forward, p.text, withPos, slab)
 	}
 	return p.iter(algo.ExactMatchNaive, input, p.caseSensitive, p.normalize, p.forward, p.text, withPos, slab)
 }

-func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Offset, int, int, *[]int) {
-	input := p.prepareInput(item)
+func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Offset, int, *[]int) {
+	var input []Token
+	if len(p.nth) == 0 {
+		input = []Token{{text: &item.text, prefixLength: 0}}
+	} else {
+		input = p.transformInput(item)
+	}
 	offsets := []Offset{}
 	var totalScore int
-	var totalTrimLen int
 	var allPos *[]int
 	if withPos {
 		allPos = &[]int{}
@@ -332,16 +388,15 @@ func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Of
 	for _, termSet := range p.termSets {
 		var offset Offset
 		var currentScore int
-		var trimLen int
 		matched := false
 		for _, term := range termSet {
 			pfun := p.procFun[term.typ]
-			off, score, tLen, pos := p.iter(pfun, input, term.caseSensitive, p.normalize, p.forward, term.text, withPos, slab)
+			off, score, pos := p.iter(pfun, input, term.caseSensitive, term.normalize, p.forward, term.text, withPos, slab)
 			if sidx := off[0]; sidx >= 0 {
 				if term.inv {
 					continue
 				}
-				offset, currentScore, trimLen = off, score, tLen
+				offset, currentScore = off, score
 				matched = true
 				if withPos {
 					if pos != nil {
@@ -354,7 +409,7 @@ func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Of
 				}
 				break
 			} else if term.inv {
-				offset, currentScore, trimLen = Offset{0, 0}, 0, 0
+				offset, currentScore = Offset{0, 0}, 0
 				matched = true
 				continue
 			}
@@ -362,31 +417,35 @@ func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Of
 		if matched {
 			offsets = append(offsets, offset)
 			totalScore += currentScore
-			totalTrimLen += trimLen
 		}
 	}
-	return offsets, totalScore, totalTrimLen, allPos
+	return offsets, totalScore, allPos
 }

-func (p *Pattern) prepareInput(item *Item) []Token {
+func (p *Pattern) transformInput(item *Item) []Token {
 	if item.transformed != nil {
-		return item.transformed
+		transformed := *item.transformed
+		if transformed.revision == p.revision {
+			return transformed.tokens
+		}
 	}

-	var ret []Token
-	if len(p.nth) == 0 {
-		ret = []Token{Token{text: &item.text, prefixLength: 0, trimLength: int32(item.text.TrimLength())}}
-	} else {
-		tokens := Tokenize(item.text, p.delimiter)
-		ret = Transform(tokens, p.nth)
+	tokens := Tokenize(item.text.ToString(), p.delimiter)
+	ret := Transform(tokens, p.nth)
+	// Strip the last delimiter to allow suffix match
+	if len(ret) > 0 && !p.delimiter.IsAwk() {
+		chars := ret[len(ret)-1].text
+		stripped := StripLastDelimiter(chars.ToString(), p.delimiter)
+		newChars := util.ToChars(stringBytes(stripped))
+		ret[len(ret)-1].text = &newChars
 	}
-	item.transformed = ret
+	item.transformed = &transformed{p.revision, ret}
 	return ret
 }

-func (p *Pattern) iter(pfun algo.Algo, tokens []Token, caseSensitive bool, normalize bool, forward bool, pattern []rune, withPos bool, slab *util.Slab) (Offset, int, int, *[]int) {
+func (p *Pattern) iter(pfun algo.Algo, tokens []Token, caseSensitive bool, normalize bool, forward bool, pattern []rune, withPos bool, slab *util.Slab) (Offset, int, *[]int) {
 	for _, part := range tokens {
-		if res, pos := pfun(caseSensitive, normalize, forward, *part.text, pattern, withPos, slab); res.Start >= 0 {
+		if res, pos := pfun(caseSensitive, normalize, forward, part.text, pattern, withPos, slab); res.Start >= 0 {
 			sidx := int32(res.Start) + part.prefixLength
 			eidx := int32(res.End) + part.prefixLength
 			if pos != nil {
@@ -394,8 +453,8 @@ func (p *Pattern) iter(pfun algo.Algo, tokens []Token, caseSensitive bool, norma
 				(*pos)[idx] += int(part.prefixLength)
 			}
 		}
-			return Offset{sidx, eidx}, res.Score, int(part.trimLength), pos
+			return Offset{sidx, eidx}, res.Score, pos
 		}
 	}
-	return Offset{-1, -1}, 0, -1, nil
+	return Offset{-1, -1}, 0, nil
 }
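parseTerms above gains support for escaped spaces by temporarily rewriting "\ " as a tab before splitting on runs of spaces, then restoring the space inside each token. The helper below is a hedged, standalone illustration of that placeholder trick using only the standard library; the function name and split regex are simplified stand-ins, not fzf's exact code.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

var spaces = regexp.MustCompile(" +")

// splitQuery splits a query on spaces while honoring backslash-escaped spaces.
// It assumes the query itself never contains a literal tab character.
func splitQuery(query string) []string {
	// 1. Hide escaped spaces behind a placeholder character.
	masked := strings.ReplaceAll(query, "\\ ", "\t")
	// 2. Split on ordinary runs of spaces.
	parts := spaces.Split(masked, -1)
	// 3. Put the literal spaces back into each token.
	for i, p := range parts {
		parts[i] = strings.ReplaceAll(p, "\t", " ")
	}
	return parts
}

func main() {
	fmt.Printf("%q\n", splitQuery(`foo\ bar baz`)) // ["foo bar" "baz"]
}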
@@ -16,7 +16,7 @@ func init() {

 func TestParseTermsExtended(t *testing.T) {
 	terms := parseTerms(true, CaseSmart, false,
-		"| aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$ | ^iii$ ^xxx | 'yyy | | zzz$ | !ZZZ |")
+		"aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$ | ^iii$ ^xxx | 'yyy | zzz$ | !ZZZ |")
 	if len(terms) != 9 ||
 		terms[0][0].typ != termFuzzy || terms[0][0].inv ||
 		terms[1][0].typ != termExact || terms[1][0].inv ||
@@ -31,20 +31,12 @@ func TestParseTermsExtended(t *testing.T) {
 		terms[8][1].typ != termExact || terms[8][1].inv ||
 		terms[8][2].typ != termSuffix || terms[8][2].inv ||
 		terms[8][3].typ != termExact || !terms[8][3].inv {
-		t.Errorf("%s", terms)
+		t.Errorf("%v", terms)
 	}
-	for idx, termSet := range terms[:8] {
+	for _, termSet := range terms[:8] {
 		term := termSet[0]
 		if len(term.text) != 3 {
-			t.Errorf("%s", term)
-		}
-		if idx > 0 && len(term.origText) != 4+idx/5 {
-			t.Errorf("%s", term)
-		}
-	}
-	for _, term := range terms[8] {
-		if len(term.origText) != 4 {
-			t.Errorf("%s", term)
+			t.Errorf("%v", term)
 		}
 	}
 }
@@ -61,26 +53,32 @@ func TestParseTermsExtendedExact(t *testing.T) {
 		terms[5][0].typ != termFuzzy || !terms[5][0].inv || len(terms[5][0].text) != 3 ||
 		terms[6][0].typ != termPrefix || !terms[6][0].inv || len(terms[6][0].text) != 3 ||
 		terms[7][0].typ != termSuffix || !terms[7][0].inv || len(terms[7][0].text) != 3 {
-		t.Errorf("%s", terms)
+		t.Errorf("%v", terms)
 	}
 }

 func TestParseTermsEmpty(t *testing.T) {
-	terms := parseTerms(true, CaseSmart, false, "' $ ^ !' !^ !$")
+	terms := parseTerms(true, CaseSmart, false, "' ^ !' !^")
 	if len(terms) != 0 {
-		t.Errorf("%s", terms)
+		t.Errorf("%v", terms)
 	}
 }

+func buildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case, normalize bool, forward bool,
+	withPos bool, cacheable bool, nth []Range, delimiter Delimiter, runes []rune) *Pattern {
+	return BuildPattern(NewChunkCache(), make(map[string]*Pattern),
+		fuzzy, fuzzyAlgo, extended, caseMode, normalize, forward,
+		withPos, cacheable, nth, delimiter, revision{}, runes, nil)
+}
+
 func TestExact(t *testing.T) {
-	defer clearPatternCache()
-	clearPatternCache()
-	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, true,
+	pattern := buildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, false, true,
 		[]Range{}, Delimiter{}, []rune("'abc"))
+	chars := util.ToChars([]byte("aabbcc abc"))
 	res, pos := algo.ExactMatchNaive(
-		pattern.caseSensitive, pattern.normalize, pattern.forward, util.RunesToChars([]rune("aabbcc abc")), pattern.termSets[0][0].text, true, nil)
+		pattern.caseSensitive, pattern.normalize, pattern.forward, &chars, pattern.termSets[0][0].text, true, nil)
 	if res.Start != 7 || res.End != 10 {
-		t.Errorf("%s / %d / %d", pattern.termSets, res.Start, res.End)
+		t.Errorf("%v / %d / %d", pattern.termSets, res.Start, res.End)
 	}
 	if pos != nil {
 		t.Errorf("pos is expected to be nil")
@@ -88,15 +86,14 @@ func TestExact(t *testing.T) {
 }

 func TestEqual(t *testing.T) {
-	defer clearPatternCache()
-	clearPatternCache()
-	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("^AbC$"))
+	pattern := buildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("^AbC$"))

 	match := func(str string, sidxExpected int, eidxExpected int) {
+		chars := util.ToChars([]byte(str))
 		res, pos := algo.EqualMatch(
-			pattern.caseSensitive, pattern.normalize, pattern.forward, util.RunesToChars([]rune(str)), pattern.termSets[0][0].text, true, nil)
+			pattern.caseSensitive, pattern.normalize, pattern.forward, &chars, pattern.termSets[0][0].text, true, nil)
 		if res.Start != sidxExpected || res.End != eidxExpected {
-			t.Errorf("%s / %d / %d", pattern.termSets, res.Start, res.End)
+			t.Errorf("%v / %d / %d", pattern.termSets, res.Start, res.End)
 		}
 		if pos != nil {
 			t.Errorf("pos is expected to be nil")
@@ -104,22 +101,18 @@ func TestEqual(t *testing.T) {
 	}
 	match("ABC", -1, -1)
 	match("AbC", 0, 3)
+	match("AbC ", 0, 3)
+	match(" AbC ", 1, 4)
+	match("  AbC", 2, 5)
 }

 func TestCaseSensitivity(t *testing.T) {
-	defer clearPatternCache()
-	clearPatternCache()
-	pat1 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("abc"))
-	clearPatternCache()
-	pat2 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("Abc"))
-	clearPatternCache()
-	pat3 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, true, []Range{}, Delimiter{}, []rune("abc"))
-	clearPatternCache()
-	pat4 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, true, []Range{}, Delimiter{}, []rune("Abc"))
-	clearPatternCache()
-	pat5 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, true, []Range{}, Delimiter{}, []rune("abc"))
-	clearPatternCache()
-	pat6 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, true, []Range{}, Delimiter{}, []rune("Abc"))
+	pat1 := buildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("abc"))
+	pat2 := buildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("Abc"))
+	pat3 := buildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, false, true, []Range{}, Delimiter{}, []rune("abc"))
+	pat4 := buildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, false, true, []Range{}, Delimiter{}, []rune("Abc"))
+	pat5 := buildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, false, true, []Range{}, Delimiter{}, []rune("abc"))
+	pat6 := buildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, false, true, []Range{}, Delimiter{}, []rune("Abc"))

 	if string(pat1.text) != "abc" || pat1.caseSensitive != false ||
 		string(pat2.text) != "Abc" || pat2.caseSensitive != true ||
|
||||||
@@ -132,31 +125,30 @@ func TestCaseSensitivity(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestOrigTextAndTransformed(t *testing.T) {
|
func TestOrigTextAndTransformed(t *testing.T) {
|
||||||
pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("jg"))
|
pattern := buildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune("jg"))
|
||||||
tokens := Tokenize(util.RunesToChars([]rune("junegunn")), Delimiter{})
|
tokens := Tokenize("junegunn", Delimiter{})
|
||||||
trans := Transform(tokens, []Range{Range{1, 1}})
|
trans := Transform(tokens, []Range{{1, 1}})
|
||||||
|
|
||||||
origBytes := []byte("junegunn.choi")
|
origBytes := []byte("junegunn.choi")
|
||||||
for _, extended := range []bool{false, true} {
|
for _, extended := range []bool{false, true} {
|
||||||
chunk := Chunk{
|
chunk := Chunk{count: 1}
|
||||||
&Item{
|
chunk.items[0] = Item{
|
||||||
text: util.RunesToChars([]rune("junegunn")),
|
text: util.ToChars([]byte("junegunn")),
|
||||||
origText: &origBytes,
|
origText: &origBytes,
|
||||||
transformed: trans},
|
transformed: &transformed{pattern.revision, trans}}
|
||||||
}
|
|
||||||
pattern.extended = extended
|
pattern.extended = extended
|
||||||
matches := pattern.matchChunk(&chunk, nil, slab) // No cache
|
matches := pattern.matchChunk(&chunk, nil, slab) // No cache
|
||||||
if !(matches[0].item.text.ToString() == "junegunn" &&
|
if !(matches[0].item.text.ToString() == "junegunn" &&
|
||||||
string(*matches[0].item.origText) == "junegunn.choi" &&
|
string(*matches[0].item.origText) == "junegunn.choi" &&
|
||||||
reflect.DeepEqual(matches[0].item.transformed, trans)) {
|
reflect.DeepEqual((*matches[0].item.transformed).tokens, trans)) {
|
||||||
t.Error("Invalid match result", matches)
|
t.Error("Invalid match result", matches)
|
||||||
}
|
}
|
||||||
|
|
||||||
match, offsets, pos := pattern.MatchItem(chunk[0], true, slab)
|
match, offsets, pos := pattern.MatchItem(&chunk.items[0], true, slab)
|
||||||
if !(match.item.text.ToString() == "junegunn" &&
|
if !(match.item.text.ToString() == "junegunn" &&
|
||||||
string(*match.item.origText) == "junegunn.choi" &&
|
string(*match.item.origText) == "junegunn.choi" &&
|
||||||
offsets[0][0] == 0 && offsets[0][1] == 5 &&
|
offsets[0][0] == 0 && offsets[0][1] == 5 &&
|
||||||
reflect.DeepEqual(match.item.transformed, trans)) {
|
reflect.DeepEqual((*match.item.transformed).tokens, trans)) {
|
||||||
t.Error("Invalid match result", match, offsets, extended)
|
t.Error("Invalid match result", match, offsets, extended)
|
||||||
}
|
}
|
||||||
if !((*pos)[0] == 4 && (*pos)[1] == 0) {
|
if !((*pos)[0] == 4 && (*pos)[1] == 0) {
|
||||||
@@ -167,22 +159,43 @@ func TestOrigTextAndTransformed(t *testing.T) {
|
|||||||
|
|
||||||
func TestCacheKey(t *testing.T) {
|
func TestCacheKey(t *testing.T) {
|
||||||
test := func(extended bool, patStr string, expected string, cacheable bool) {
|
test := func(extended bool, patStr string, expected string, cacheable bool) {
|
||||||
pat := BuildPattern(true, algo.FuzzyMatchV2, extended, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune(patStr))
|
pat := buildPattern(true, algo.FuzzyMatchV2, extended, CaseSmart, false, true, false, true, []Range{}, Delimiter{}, []rune(patStr))
|
||||||
if pat.CacheKey() != expected {
|
if pat.CacheKey() != expected {
|
||||||
t.Errorf("Expected: %s, actual: %s", expected, pat.CacheKey())
|
t.Errorf("Expected: %s, actual: %s", expected, pat.CacheKey())
|
||||||
}
|
}
|
||||||
if pat.cacheable != cacheable {
|
if pat.cacheable != cacheable {
|
||||||
t.Errorf("Expected: %s, actual: %s (%s)", cacheable, pat.cacheable, patStr)
|
t.Errorf("Expected: %t, actual: %t (%s)", cacheable, pat.cacheable, patStr)
|
||||||
}
|
}
|
||||||
clearPatternCache()
|
|
||||||
}
|
}
|
||||||
test(false, "foo !bar", "foo !bar", true)
|
test(false, "foo !bar", "foo !bar", true)
|
||||||
test(false, "foo | bar !baz", "foo | bar !baz", true)
|
test(false, "foo | bar !baz", "foo | bar !baz", true)
|
||||||
test(true, "foo bar baz", "foo bar baz", true)
|
test(true, "foo bar baz", "foo\tbar\tbaz", true)
|
||||||
test(true, "foo !bar", "foo", false)
|
test(true, "foo !bar", "foo", false)
|
||||||
test(true, "foo !bar baz", "foo baz", false)
|
test(true, "foo !bar baz", "foo\tbaz", false)
|
||||||
test(true, "foo | bar baz", "baz", false)
|
test(true, "foo | bar baz", "baz", false)
|
||||||
test(true, "foo | bar | baz", "", false)
|
test(true, "foo | bar | baz", "", false)
|
||||||
test(true, "foo | bar !baz", "", false)
|
test(true, "foo | bar !baz", "", false)
|
||||||
test(true, "| | | foo", "foo", true)
|
test(true, "| | foo", "", false)
|
||||||
|
test(true, "| | | foo", "foo", false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCacheable(t *testing.T) {
|
||||||
|
test := func(fuzzy bool, str string, expected string, cacheable bool) {
|
||||||
|
pat := buildPattern(fuzzy, algo.FuzzyMatchV2, true, CaseSmart, true, true, false, true, []Range{}, Delimiter{}, []rune(str))
|
||||||
|
if pat.CacheKey() != expected {
|
||||||
|
t.Errorf("Expected: %s, actual: %s", expected, pat.CacheKey())
|
||||||
|
}
|
||||||
|
if cacheable != pat.cacheable {
|
||||||
|
t.Errorf("Invalid Pattern.cacheable for \"%s\": %v (expected: %v)", str, pat.cacheable, cacheable)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
test(true, "foo bar", "foo\tbar", true)
|
||||||
|
test(true, "foo 'bar", "foo\tbar", false)
|
||||||
|
test(true, "foo !bar", "foo", false)
|
||||||
|
|
||||||
|
test(false, "foo bar", "foo\tbar", true)
|
||||||
|
test(false, "foo 'bar", "foo", false)
|
||||||
|
test(false, "foo '", "foo", true)
|
||||||
|
test(false, "foo 'bar", "foo", false)
|
||||||
|
test(false, "foo !bar", "foo", false)
|
||||||
}
|
}
|
||||||
|
|||||||

src/protector/protector.go (new file, +6 lines)
@@ -0,0 +1,6 @@
+//go:build !openbsd
+
+package protector
+
+// Protect calls OS specific protections like pledge on OpenBSD
+func Protect() {}

src/protector/protector_openbsd.go (new file, +10 lines)
@@ -0,0 +1,10 @@
+//go:build openbsd
+
+package protector
+
+import "golang.org/x/sys/unix"
+
+// Protect calls OS specific protections like pledge on OpenBSD
+func Protect() {
+    unix.PledgePromises("stdio dpath wpath rpath tty proc exec inet tmppath")
+}

src/proxy.go (new file, +162 lines)
@@ -0,0 +1,162 @@
+package fzf
+
+import (
+    "bufio"
+    "errors"
+    "fmt"
+    "io"
+    "os"
+    "os/exec"
+    "os/signal"
+    "path/filepath"
+    "regexp"
+    "strings"
+    "time"
+
+    "github.com/junegunn/fzf/src/tui"
+    "github.com/junegunn/fzf/src/util"
+)
+
+const becomeSuffix = ".become"
+
+func escapeSingleQuote(str string) string {
+    return "'" + strings.ReplaceAll(str, "'", "'\\''") + "'"
+}
+
+func fifo(name string) (string, error) {
+    ns := time.Now().UnixNano()
+    output := filepath.Join(os.TempDir(), fmt.Sprintf("fzf-%s-%d", name, ns))
+    output, err := mkfifo(output, 0600)
+    if err != nil {
+        return output, err
+    }
+    return output, nil
+}
+
+func runProxy(commandPrefix string, cmdBuilder func(temp string, needBash bool) (*exec.Cmd, error), opts *Options, withExports bool) (int, error) {
+    output, err := fifo("proxy-output")
+    if err != nil {
+        return ExitError, err
+    }
+    defer os.Remove(output)
+
+    // Take the output
+    go func() {
+        withOutputPipe(output, func(outputFile io.ReadCloser) {
+            if opts.Output == nil {
+                io.Copy(os.Stdout, outputFile)
+            } else {
+                reader := bufio.NewReader(outputFile)
+                sep := opts.PrintSep[0]
+                for {
+                    item, err := reader.ReadString(sep)
+                    if err != nil {
+                        break
+                    }
+                    opts.Output <- item
+                }
+            }
+        })
+    }()
+
+    var command, input string
+    commandPrefix += ` --no-force-tty-in --proxy-script "$0"`
+    if opts.Input == nil && (opts.ForceTtyIn || util.IsTty(os.Stdin)) {
+        command = fmt.Sprintf(`%s > %q`, commandPrefix, output)
+    } else {
+        input, err = fifo("proxy-input")
+        if err != nil {
+            return ExitError, err
+        }
+        defer os.Remove(input)
+
+        go func() {
+            withInputPipe(input, func(inputFile io.WriteCloser) {
+                if opts.Input == nil {
+                    io.Copy(inputFile, os.Stdin)
+                } else {
+                    for item := range opts.Input {
+                        fmt.Fprint(inputFile, item+opts.PrintSep)
+                    }
+                }
+            })
+        }()
+
+        if withExports {
+            command = fmt.Sprintf(`%s < %q > %q`, commandPrefix, input, output)
+        } else {
+            // For mintty: cannot directly read named pipe from Go code
+            command = fmt.Sprintf(`command cat %q | %s > %q`, input, commandPrefix, output)
+        }
+    }
+
+    // Write the command to a temporary file and run it with sh to ensure POSIX compliance.
+    var exports []string
+    needBash := false
+    if withExports {
+        // Nullify FZF_DEFAULT_* variables as tmux popup may inject them even when undefined.
+        exports = []string{"FZF_DEFAULT_COMMAND=", "FZF_DEFAULT_OPTS=", "FZF_DEFAULT_OPTS_FILE="}
+        validIdentifier := regexp.MustCompile(`^[a-zA-Z_][a-zA-Z0-9_]*$`)
+        for _, pairStr := range os.Environ() {
+            pair := strings.SplitN(pairStr, "=", 2)
+            if validIdentifier.MatchString(pair[0]) {
+                exports = append(exports, fmt.Sprintf("export %s=%s", pair[0], escapeSingleQuote(pair[1])))
+            } else if strings.HasPrefix(pair[0], "BASH_FUNC_") && strings.HasSuffix(pair[0], "%%") {
+                name := pair[0][10 : len(pair[0])-2]
+                exports = append(exports, name+pair[1])
+                exports = append(exports, "export -f "+name)
+                needBash = true
+            }
+        }
+    }
+    temp := WriteTemporaryFile(append(exports, command), "\n")
+    defer os.Remove(temp)
+
+    cmd, err := cmdBuilder(temp, needBash)
+    if err != nil {
+        return ExitError, err
+    }
+    cmd.Stderr = os.Stderr
+    intChan := make(chan os.Signal, 1)
+    defer close(intChan)
+    go func() {
+        if sig, valid := <-intChan; valid {
+            cmd.Process.Signal(sig)
+        }
+    }()
+    signal.Notify(intChan, os.Interrupt)
+    if err := cmd.Run(); err != nil {
+        if exitError, ok := err.(*exec.ExitError); ok {
+            code := exitError.ExitCode()
+            if code == ExitBecome {
+                becomeFile := temp + becomeSuffix
+                data, err := os.ReadFile(becomeFile)
+                os.Remove(becomeFile)
+                if err != nil {
+                    return ExitError, err
+                }
+                elems := strings.Split(string(data), "\x00")
+                if len(elems) < 1 {
+                    return ExitError, errors.New("invalid become command")
+                }
+                command := elems[0]
+                env := []string{}
+                if len(elems) > 1 {
+                    env = elems[1:]
+                }
+                executor := util.NewExecutor(opts.WithShell)
+                ttyin, err := tui.TtyIn(opts.TtyDefault)
+                if err != nil {
+                    return ExitError, err
+                }
+                os.Remove(temp)
+                os.Remove(input)
+                os.Remove(output)
+                executor.Become(ttyin, env, command)
+            }
+            return code, err
+        }
+    }
+
+    return ExitOk, nil
+}
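
The new proxy.go above drives the whole round trip through named pipes: fifo() creates one under os.TempDir(), one goroutine feeds the inner process through withInputPipe(), and another drains its output through withOutputPipe(), with the platform-specific pieces defined in proxy_unix.go and proxy_windows.go below. The following is only an illustrative sketch of that FIFO pattern on Unix-like systems, reusing the same golang.org/x/sys/unix.Mkfifo call as proxy_unix.go; it is not part of the diff, and error handling is reduced to panics for brevity.

// Sketch: create a FIFO, write to it from one goroutine, read it from another.
package main

import (
    "fmt"
    "io"
    "os"
    "path/filepath"
    "time"

    "golang.org/x/sys/unix"
)

func main() {
    // A throwaway FIFO path, similar to what fifo() builds in proxy.go.
    path := filepath.Join(os.TempDir(), fmt.Sprintf("fifo-sketch-%d", time.Now().UnixNano()))
    if err := unix.Mkfifo(path, 0600); err != nil {
        panic(err)
    }
    defer os.Remove(path)

    // Writer side: opening for writing blocks until a reader connects.
    go func() {
        w, err := os.OpenFile(path, os.O_WRONLY, 0)
        if err != nil {
            panic(err)
        }
        defer w.Close()
        fmt.Fprintln(w, "first line")
        fmt.Fprintln(w, "second line")
    }()

    // Reader side: copy everything to stdout until the writer closes,
    // which is what proxy.go does with the output pipe of the inner process.
    r, err := os.OpenFile(path, os.O_RDONLY, 0)
    if err != nil {
        panic(err)
    }
    defer r.Close()
    io.Copy(os.Stdout, r)
}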

src/proxy_unix.go (new file, +41 lines)
@@ -0,0 +1,41 @@
+//go:build !windows
+
+package fzf
+
+import (
+    "io"
+    "os"
+
+    "golang.org/x/sys/unix"
+)
+
+func sh(bash bool) (string, error) {
+    if bash {
+        return "bash", nil
+    }
+    return "sh", nil
+}
+
+func mkfifo(path string, mode uint32) (string, error) {
+    return path, unix.Mkfifo(path, mode)
+}
+
+func withOutputPipe(output string, task func(io.ReadCloser)) error {
+    outputFile, err := os.OpenFile(output, os.O_RDONLY, 0)
+    if err != nil {
+        return err
+    }
+    task(outputFile)
+    outputFile.Close()
+    return nil
+}
+
+func withInputPipe(input string, task func(io.WriteCloser)) error {
+    inputFile, err := os.OpenFile(input, os.O_WRONLY, 0)
+    if err != nil {
+        return err
+    }
+    task(inputFile)
+    inputFile.Close()
+    return nil
+}

src/proxy_windows.go (new file, +85 lines)
@@ -0,0 +1,85 @@
+//go:build windows
+
+package fzf
+
+import (
+    "fmt"
+    "io"
+    "os/exec"
+    "strconv"
+    "strings"
+    "sync/atomic"
+)
+
+var shPath atomic.Value
+
+func sh(bash bool) (string, error) {
+    if cached := shPath.Load(); cached != nil {
+        return cached.(string), nil
+    }
+
+    name := "sh"
+    if bash {
+        name = "bash"
+    }
+    cmd := exec.Command("cygpath", "-w", "/usr/bin/"+name)
+    bytes, err := cmd.Output()
+    if err != nil {
+        return "", err
+    }
+
+    sh := strings.TrimSpace(string(bytes))
+    shPath.Store(sh)
+    return sh, nil
+}
+
+func mkfifo(path string, mode uint32) (string, error) {
+    m := strconv.FormatUint(uint64(mode), 8)
+    sh, err := sh(false)
+    if err != nil {
+        return path, err
+    }
+    cmd := exec.Command(sh, "-c", fmt.Sprintf(`command mkfifo -m %s %q`, m, path))
+    if err := cmd.Run(); err != nil {
+        return path, err
+    }
+    return path + ".lnk", nil
+}
+
+func withOutputPipe(output string, task func(io.ReadCloser)) error {
+    sh, err := sh(false)
+    if err != nil {
+        return err
+    }
+    cmd := exec.Command(sh, "-c", fmt.Sprintf(`command cat %q`, output))
+    outputFile, err := cmd.StdoutPipe()
+    if err != nil {
+        return err
+    }
+    if err := cmd.Start(); err != nil {
+        return err
+    }
+
+    task(outputFile)
+    cmd.Wait()
+    return nil
+}
+
+func withInputPipe(input string, task func(io.WriteCloser)) error {
+    sh, err := sh(false)
+    if err != nil {
+        return err
+    }
+    cmd := exec.Command(sh, "-c", fmt.Sprintf(`command cat - > %q`, input))
+    inputFile, err := cmd.StdinPipe()
+    if err != nil {
+        return err
+    }
+    if err := cmd.Start(); err != nil {
+        return err
+    }
+    task(inputFile)
+    inputFile.Close()
+    cmd.Wait()
+    return nil
+}

src/reader.go (368 lines changed)
@@ -1,78 +1,376 @@
 package fzf

 import (
-    "bufio"
+    "bytes"
+    "context"
     "io"
+    "io/fs"
     "os"
+    "path/filepath"
+    "strings"
+    "sync"
+    "sync/atomic"
+    "time"
+
+    "github.com/charlievieth/fastwalk"
     "github.com/junegunn/fzf/src/util"
 )

 // Reader reads from command or standard input
 type Reader struct {
     pusher   func([]byte) bool
+    executor *util.Executor
     eventBox *util.EventBox
     delimNil bool
+    event    int32
+    finChan  chan bool
+    mutex    sync.Mutex
+    killed   bool
+    termFunc func()
+    command  *string
+    wait     bool
+}
+
+// NewReader returns new Reader object
+func NewReader(pusher func([]byte) bool, eventBox *util.EventBox, executor *util.Executor, delimNil bool, wait bool) *Reader {
+    return &Reader{
+        pusher,
+        executor,
+        eventBox,
+        delimNil,
+        int32(EvtReady),
+        make(chan bool, 1),
+        sync.Mutex{},
+        false,
+        func() { os.Stdin.Close() },
+        nil,
+        wait}
+}
+
+func (r *Reader) startEventPoller() {
+    go func() {
+        ptr := &r.event
+        pollInterval := readerPollIntervalMin
+        for {
+            if atomic.CompareAndSwapInt32(ptr, int32(EvtReadNew), int32(EvtReady)) {
+                r.eventBox.Set(EvtReadNew, (*string)(nil))
+                pollInterval = readerPollIntervalMin
+            } else if atomic.LoadInt32(ptr) == int32(EvtReadFin) {
+                if r.wait {
+                    r.finChan <- true
+                }
+                return
+            } else {
+                pollInterval += readerPollIntervalStep
+                if pollInterval > readerPollIntervalMax {
+                    pollInterval = readerPollIntervalMax
+                }
+            }
+            time.Sleep(pollInterval)
+        }
+    }()
+}
+
+func (r *Reader) fin(success bool) {
+    atomic.StoreInt32(&r.event, int32(EvtReadFin))
+    if r.wait {
+        <-r.finChan
+    }
+
+    r.mutex.Lock()
+    ret := r.command
+    if success || r.killed {
+        ret = nil
+    }
+    r.mutex.Unlock()
+
+    r.eventBox.Set(EvtReadFin, ret)
+}
+
+func (r *Reader) terminate() {
+    r.mutex.Lock()
+    r.killed = true
+    if r.termFunc != nil {
+        r.termFunc()
+        r.termFunc = nil
+    }
+    r.mutex.Unlock()
+}
+
+func (r *Reader) restart(command commandSpec, environ []string, readyChan chan bool) {
+    r.event = int32(EvtReady)
+    r.startEventPoller()
+    success := r.readFromCommand(command.command, environ, func() {
+        readyChan <- true
+    })
+    r.fin(success)
+    removeFiles(command.tempFiles)
+}
+
+func (r *Reader) readChannel(inputChan chan string) bool {
+    for {
+        item, more := <-inputChan
+        if !more {
+            break
+        }
+        if r.pusher([]byte(item)) {
+            atomic.StoreInt32(&r.event, int32(EvtReadNew))
+        }
+    }
+    return true
 }

 // ReadSource reads data from the default command or from standard input
-func (r *Reader) ReadSource() {
-    if util.IsTty() {
+func (r *Reader) ReadSource(inputChan chan string, roots []string, opts walkerOpts, ignores []string, initCmd string, initEnv []string, readyChan chan bool) {
+    r.startEventPoller()
+    var success bool
+    signalReady := func() {
+        if readyChan != nil {
+            readyChan <- true
+        }
+    }
+    if inputChan != nil {
+        signalReady()
+        success = r.readChannel(inputChan)
+    } else if len(initCmd) > 0 {
+        success = r.readFromCommand(initCmd, initEnv, signalReady)
+    } else if util.IsTty(os.Stdin) {
         cmd := os.Getenv("FZF_DEFAULT_COMMAND")
         if len(cmd) == 0 {
-            cmd = defaultCommand
+            signalReady()
+            success = r.readFiles(roots, opts, ignores)
+        } else {
+            success = r.readFromCommand(cmd, initEnv, signalReady)
         }
-        r.readFromCommand(cmd)
     } else {
-        r.readFromStdin()
+        signalReady()
+        success = r.readFromStdin()
     }
-    r.eventBox.Set(EvtReadFin, nil)
+    r.fin(success)
 }

 func (r *Reader) feed(src io.Reader) {
+    /*
+        readerSlabSize, ae := strconv.Atoi(os.Getenv("SLAB_KB"))
+        if ae != nil {
+            readerSlabSize = 128 * 1024
+        } else {
+            readerSlabSize *= 1024
+        }
+        readerBufferSize, be := strconv.Atoi(os.Getenv("BUF_KB"))
+        if be != nil {
+            readerBufferSize = 64 * 1024
+        } else {
+            readerBufferSize *= 1024
+        }
+    */
+
     delim := byte('\n')
+    trimCR := util.IsWindows()
     if r.delimNil {
         delim = '\000'
+        trimCR = false
     }
-    reader := bufio.NewReaderSize(src, readerBufferSize)
+    slab := make([]byte, readerSlabSize)
+    leftover := []byte{}
+    var err error
     for {
-        // ReadBytes returns err != nil if and only if the returned data does not
-        // end in delim.
-        bytea, err := reader.ReadBytes(delim)
-        byteaLen := len(bytea)
-        if len(bytea) > 0 {
-            if err == nil {
-                // get rid of carriage return if under Windows:
-                if util.IsWindows() && byteaLen >= 2 && bytea[byteaLen-2] == byte('\r') {
-                    bytea = bytea[:byteaLen-2]
-                } else {
-                    bytea = bytea[:byteaLen-1]
-                }
-            }
-            if r.pusher(bytea) {
-                r.eventBox.Set(EvtReadNew, nil)
+        n := 0
+        scope := slab[:util.Min(len(slab), readerBufferSize)]
+        for i := 0; i < 100; i++ {
+            n, err = src.Read(scope)
+            if n > 0 || err != nil {
+                break
             }
         }
-        if err != nil {
+
+        // We're not making any progress after 100 tries. Stop.
+        if n == 0 {
             break
         }
+
+        buf := slab[:n]
+        slab = slab[n:]
+
+        for len(buf) > 0 {
+            if i := bytes.IndexByte(buf, delim); i >= 0 {
+                // Found the delimiter
+                slice := buf[:i+1]
+                buf = buf[i+1:]
+                if trimCR && len(slice) >= 2 && slice[len(slice)-2] == byte('\r') {
+                    slice = slice[:len(slice)-2]
+                } else {
+                    slice = slice[:len(slice)-1]
+                }
+                if len(leftover) > 0 {
+                    slice = append(leftover, slice...)
+                    leftover = []byte{}
+                }
+                if (err == nil || len(slice) > 0) && r.pusher(slice) {
+                    atomic.StoreInt32(&r.event, int32(EvtReadNew))
+                }
+            } else {
+                // Could not find the delimiter in the buffer
+                // NOTE: We can further optimize this by keeping track of the cursor
+                // position in the slab so that a straddling item that doesn't go
+                // beyond the boundary of a slab doesn't need to be copied to
+                // another buffer. However, the performance gain is negligible in
+                // practice (< 0.1%) and is not
+                // worth the added complexity.
+                leftover = append(leftover, buf...)
+                break
+            }
+        }
+
+        if err == io.EOF {
+            leftover = append(leftover, buf...)
+            break
+        }
+
+        if len(slab) == 0 {
+            slab = make([]byte, readerSlabSize)
+        }
+    }
+    if len(leftover) > 0 && r.pusher(leftover) {
+        atomic.StoreInt32(&r.event, int32(EvtReadNew))
     }
 }

-func (r *Reader) readFromStdin() {
+func (r *Reader) readFromStdin() bool {
     r.feed(os.Stdin)
+    return true
 }

-func (r *Reader) readFromCommand(cmd string) {
-    listCommand := util.ExecCommand(cmd)
-    out, err := listCommand.StdoutPipe()
-    if err != nil {
-        return
+func isSymlinkToDir(path string, de os.DirEntry) bool {
+    if de.Type()&fs.ModeSymlink == 0 {
+        return false
     }
-    err = listCommand.Start()
-    if err != nil {
-        return
+    if s, err := os.Stat(path); err == nil {
+        return s.IsDir()
     }
-    defer listCommand.Wait()
-    r.feed(out)
+    return false
+}
+
+func trimPath(path string) string {
+    bytes := stringBytes(path)
+
+    for len(bytes) > 1 && bytes[0] == '.' && (bytes[1] == '/' || bytes[1] == '\\') {
+        bytes = bytes[2:]
+    }
+
+    if len(bytes) == 0 {
+        return "."
+    }
+
+    return byteString(bytes)
+}
+
+func (r *Reader) readFiles(roots []string, opts walkerOpts, ignores []string) bool {
+    conf := fastwalk.Config{
+        Follow: opts.follow,
+        // Use forward slashes when running a Windows binary under WSL or MSYS
+        ToSlash: fastwalk.DefaultToSlash(),
+        Sort:    fastwalk.SortFilesFirst,
+    }
+    ignoresBase := []string{}
+    ignoresFull := []string{}
+    ignoresSuffix := []string{}
+    sep := string(os.PathSeparator)
+    if _, ok := os.LookupEnv("MSYSTEM"); ok {
+        sep = "/"
+    }
+    for _, ignore := range ignores {
+        if strings.ContainsRune(ignore, os.PathSeparator) {
+            if strings.HasPrefix(ignore, sep) {
+                ignoresSuffix = append(ignoresSuffix, ignore)
+            } else {
+                // 'foo/bar' should match
+                // * 'foo/bar'
+                // * 'baz/foo/bar'
+                // * but NOT 'bazfoo/bar'
+                ignoresFull = append(ignoresFull, ignore)
+                ignoresSuffix = append(ignoresSuffix, sep+ignore)
+            }
+        } else {
+            ignoresBase = append(ignoresBase, ignore)
+        }
+    }
+    fn := func(path string, de os.DirEntry, err error) error {
+        if err != nil {
+            return nil
+        }
+        path = trimPath(path)
+        if path != "." {
+            isDir := de.IsDir()
+            if isDir || opts.follow && isSymlinkToDir(path, de) {
+                base := filepath.Base(path)
+                if !opts.hidden && base[0] == '.' && base != ".." {
+                    return filepath.SkipDir
+                }
+                for _, ignore := range ignoresBase {
+                    if ignore == base {
+                        return filepath.SkipDir
+                    }
+                }
+                for _, ignore := range ignoresFull {
+                    if ignore == path {
+                        return filepath.SkipDir
+                    }
+                }
+                for _, ignore := range ignoresSuffix {
+                    if strings.HasSuffix(path, ignore) {
+                        return filepath.SkipDir
+                    }
+                }
+                if path != sep {
+                    path += sep
+                }
+            }
+            if ((opts.file && !isDir) || (opts.dir && isDir)) && r.pusher(stringBytes(path)) {
+                atomic.StoreInt32(&r.event, int32(EvtReadNew))
+            }
+        }
+        r.mutex.Lock()
+        defer r.mutex.Unlock()
+        if r.killed {
+            return context.Canceled
+        }
+        return nil
+    }
+    noerr := true
+    for _, root := range roots {
+        noerr = noerr && (fastwalk.Walk(&conf, root, fn) == nil)
+    }
+    return noerr
+}
+
+func (r *Reader) readFromCommand(command string, environ []string, signalReady func()) bool {
+    r.mutex.Lock()
+
+    r.killed = false
+    r.termFunc = nil
+    r.command = &command
+    exec := r.executor.ExecCommand(command, true)
+    if environ != nil {
+        exec.Env = environ
+    }
+    execOut, err := exec.StdoutPipe()
+    if err != nil || exec.Start() != nil {
+        signalReady()
+        r.mutex.Unlock()
+        return false
+    }
+
+    // Function to call to terminate the running command
+    r.termFunc = func() {
+        execOut.Close()
+        util.KillCommand(exec)
+    }
+
+    signalReady()
+    r.mutex.Unlock()
+
+    r.feed(execOut)
+    return exec.Wait() == nil
 }

@@ -2,6 +2,7 @@ package fzf

 import (
     "testing"
+    "time"

     "github.com/junegunn/fzf/src/util"
 )
@@ -9,9 +10,12 @@ import (
 func TestReadFromCommand(t *testing.T) {
     strs := []string{}
     eb := util.NewEventBox()
-    reader := Reader{
-        pusher:   func(s []byte) bool { strs = append(strs, string(s)); return true },
-        eventBox: eb}
+    exec := util.NewExecutor("")
+    reader := NewReader(
+        func(s []byte) bool { strs = append(strs, string(s)); return true },
+        eb, exec, false, true)
+
+    reader.startEventPoller()
+
     // Check EventBox
     if eb.Peek(EvtReadNew) {
@@ -19,21 +23,20 @@ func TestReadFromCommand(t *testing.T) {
     }

     // Normal command
-    reader.readFromCommand(`echo abc && echo def`)
-    if len(strs) != 2 || strs[0] != "abc" || strs[1] != "def" {
+    counter := 0
+    ready := func() {
+        counter++
+    }
+    reader.fin(reader.readFromCommand(`echo abc&&echo def`, nil, ready))
+    if len(strs) != 2 || strs[0] != "abc" || strs[1] != "def" || counter != 1 {
         t.Errorf("%s", strs)
     }

     // Check EventBox again
-    if !eb.Peek(EvtReadNew) {
-        t.Error("EvtReadNew should be set yet")
-    }
+    eb.WaitFor(EvtReadFin)

     // Wait should return immediately
     eb.Wait(func(events *util.Events) {
-        if _, found := (*events)[EvtReadNew]; !found {
-            t.Errorf("%s", events)
-        }
         events.Clear()
     })

@@ -42,15 +45,24 @@ func TestReadFromCommand(t *testing.T) {
         t.Error("EvtReadNew should not be set yet")
     }

+    // Make sure that event poller is finished
+    time.Sleep(readerPollIntervalMax)
+
+    // Restart event poller
+    reader.startEventPoller()
+
     // Failing command
-    reader.readFromCommand(`no-such-command`)
+    reader.fin(reader.readFromCommand(`no-such-command`, nil, ready))
     strs = []string{}
-    if len(strs) > 0 {
+    if len(strs) > 0 || counter != 2 {
         t.Errorf("%s", strs)
     }

     // Check EventBox again
     if eb.Peek(EvtReadNew) {
-        t.Error("Command failed. EvtReadNew should be set")
+        t.Error("Command failed. EvtReadNew should not be set")
+    }
+    if !eb.Peek(EvtReadFin) {
+        t.Error("EvtReadFin should be set")
     }
 }

src/result.go (233 lines changed)
@@ -15,34 +15,35 @@ type Offset [2]int32
 type colorOffset struct {
     offset [2]int32
     color  tui.ColorPair
-    attr   tui.Attr
-    index  int32
+    match  bool
+    url    *url
 }

-type rank struct {
-    points [4]uint16
-    index  int32
+func (co colorOffset) IsFullBgMarker(at int32) bool {
+    return at == co.offset[0] && at == co.offset[1] && co.color.Attr()&tui.FullBg > 0
 }

 type Result struct {
     item *Item
-    rank rank
+    points [4]uint16
 }

-func buildResult(item *Item, offsets []Offset, score int, trimLen int) *Result {
+func buildResult(item *Item, offsets []Offset, score int) Result {
     if len(offsets) > 1 {
         sort.Sort(ByOrder(offsets))
     }

-    result := Result{item: item, rank: rank{index: item.index}}
+    result := Result{item: item}
     numChars := item.text.Length()
     minBegin := math.MaxUint16
+    minEnd := math.MaxUint16
     maxEnd := 0
     validOffsetFound := false
     for _, offset := range offsets {
         b, e := int(offset[0]), int(offset[1])
         if b < e {
             minBegin = util.Min(b, minBegin)
+            minEnd = util.Min(e, minEnd)
             maxEnd = util.Max(e, maxEnd)
             validOffsetFound = true
         }
@@ -54,9 +55,39 @@ func buildResult(item *Item, offsets []Offset, score int, trimLen int) *Result {
         case byScore:
             // Higher is better
             val = math.MaxUint16 - util.AsUint16(score)
+        case byChunk:
+            if validOffsetFound {
+                b := minBegin
+                e := maxEnd
+                for ; b >= 1; b-- {
+                    if unicode.IsSpace(item.text.Get(b - 1)) {
+                        break
+                    }
+                }
+                for ; e < numChars; e++ {
+                    if unicode.IsSpace(item.text.Get(e)) {
+                        break
+                    }
+                }
+                val = util.AsUint16(e - b)
+            }
         case byLength:
-            // If offsets is empty, trimLen will be 0, but we don't care
-            val = util.AsUint16(trimLen)
+            val = item.TrimLength()
+        case byPathname:
+            if validOffsetFound {
+                // lastDelim := strings.LastIndexByte(item.text.ToString(), '/')
+                lastDelim := -1
+                s := item.text.ToString()
+                for i := len(s) - 1; i >= 0; i-- {
+                    if s[i] == '/' || s[i] == '\\' {
+                        lastDelim = i
+                        break
+                    }
+                }
+                if lastDelim <= minBegin {
+                    val = util.AsUint16(minBegin - lastDelim)
+                }
+            }
         case byBegin, byEnd:
             if validOffsetFound {
                 whitePrefixLen := 0
@@ -68,16 +99,16 @@ func buildResult(item *Item, offsets []Offset, score int, trimLen int) *Result {
                     }
                 }
                 if criterion == byBegin {
-                    val = util.AsUint16(minBegin - whitePrefixLen)
+                    val = util.AsUint16(minEnd - whitePrefixLen)
                 } else {
-                    val = util.AsUint16(math.MaxUint16 - math.MaxUint16*(maxEnd-whitePrefixLen)/trimLen)
+                    val = util.AsUint16(math.MaxUint16 - math.MaxUint16*(maxEnd-whitePrefixLen)/(int(item.TrimLength())+1))
                 }
             }
         }
-        result.rank.points[idx] = val
+        result.points[3-idx] = val
     }

-    return &result
+    return result
 }

 // Sort criteria to use. Never changes once fzf is started.
@@ -85,28 +116,28 @@ var sortCriteria []criterion

 // Index returns ordinal index of the Item
 func (result *Result) Index() int32 {
-    return result.item.index
+    return result.item.Index()
 }

-func minRank() rank {
-    return rank{index: 0, points: [4]uint16{math.MaxUint16, 0, 0, 0}}
+func minRank() Result {
+    return Result{item: &minItem, points: [4]uint16{math.MaxUint16, 0, 0, 0}}
 }

-func (result *Result) colorOffsets(matchOffsets []Offset, theme *tui.ColorTheme, color tui.ColorPair, attr tui.Attr, current bool) []colorOffset {
+func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, theme *tui.ColorTheme, colBase tui.ColorPair, colMatch tui.ColorPair, attrNth tui.Attr, hidden bool) []colorOffset {
     itemColors := result.item.Colors()

-    // No ANSI code, or --color=no
-    if len(itemColors) == 0 {
-        var offsets []colorOffset
-        for _, off := range matchOffsets {
-            offsets = append(offsets, colorOffset{offset: [2]int32{off[0], off[1]}, color: color, attr: attr})
+    // No ANSI codes
+    if len(itemColors) == 0 && len(nthOffsets) == 0 {
+        offsets := make([]colorOffset, len(matchOffsets))
+        for i, off := range matchOffsets {
+            offsets[i] = colorOffset{offset: [2]int32{off[0], off[1]}, color: colMatch, match: true}
         }
         return offsets
     }

     // Find max column
     var maxCol int32
-    for _, off := range matchOffsets {
+    for _, off := range append(matchOffsets, nthOffsets...) {
         if off[1] > maxCol {
             maxCol = off[1]
         }
@@ -116,17 +147,38 @@ func (result *Result) colorOffsets(matchOffsets []Offset, theme *tui.ColorTheme,
             maxCol = ansi.offset[1]
         }
     }
-    cols := make([]int, maxCol)

+    type cellInfo struct {
+        index int
+        color bool
+        match bool
+        nth   bool
+        fbg   tui.Color
+    }
+
+    cols := make([]cellInfo, maxCol+1)
+    for idx := range cols {
+        cols[idx].fbg = -1
+    }
     for colorIndex, ansi := range itemColors {
-        for i := ansi.offset[0]; i < ansi.offset[1]; i++ {
-            cols[i] = colorIndex + 1 // XXX
+        if ansi.offset[0] == ansi.offset[1] && ansi.color.attr&tui.FullBg > 0 {
+            cols[ansi.offset[0]].fbg = ansi.color.lbg
+        } else {
+            for i := ansi.offset[0]; i < ansi.offset[1]; i++ {
+                cols[i] = cellInfo{colorIndex, true, false, false, cols[i].fbg}
+            }
         }
     }

     for _, off := range matchOffsets {
         for i := off[0]; i < off[1]; i++ {
-            cols[i] = -1
+            cols[i].match = true
+        }
+    }
+
+    for _, off := range nthOffsets {
+        for i := off[0]; i < off[1]; i++ {
+            cols[i].nth = true
         }
     }

@@ -136,38 +188,92 @@ func (result *Result) colorOffsets(matchOffsets []Offset, theme *tui.ColorTheme,
     // ------------ ----  --  ----
     //   ++++++++      ++++++++++
     // --++++++++--  --++++++++++---
-    curr := 0
+    curr := cellInfo{0, false, false, false, -1}
     start := 0
+    ansiToColorPair := func(ansi ansiOffset, base tui.ColorPair) tui.ColorPair {
+        if !theme.Colored {
+            return tui.NewColorPair(-1, -1, ansi.color.attr).MergeAttr(base)
+        }
+        // fd --color always | fzf --ansi --delimiter / --nth -1 --color fg:dim:strip,nth:regular
+        if base.ShouldStripColors() {
+            return base
+        }
+        fg := ansi.color.fg
+        bg := ansi.color.bg
+        if fg == -1 {
+            fg = colBase.Fg()
+        }
+        if bg == -1 {
+            bg = colBase.Bg()
+        }
+        return tui.NewColorPair(fg, bg, ansi.color.attr).MergeAttr(base)
+    }
     var colors []colorOffset
     add := func(idx int) {
-        if curr != 0 && idx > start {
-            if curr == -1 {
-                colors = append(colors, colorOffset{
-                    offset: [2]int32{int32(start), int32(idx)}, color: color, attr: attr})
-            } else {
-                ansi := itemColors[curr-1]
-                fg := ansi.color.fg
-                bg := ansi.color.bg
-                if theme != nil {
-                    if fg == -1 {
-                        if current {
-                            fg = theme.Current
+        if curr.fbg >= 0 {
+            colors = append(colors, colorOffset{
+                offset: [2]int32{int32(start), int32(start)},
+                color:  tui.NewColorPair(-1, curr.fbg, tui.FullBg),
+                match:  false,
+                url:    nil})
+        }
+        if (curr.color || curr.nth || curr.match) && idx > start {
+            if curr.match {
+                var color tui.ColorPair
+                if curr.nth {
+                    color = colBase.WithAttr(attrNth).Merge(colMatch)
                 } else {
-                            fg = theme.Fg
+                    color = colBase.Merge(colMatch)
                 }
-                    }
-                    if bg == -1 {
-                        if current {
-                            bg = theme.DarkBg
-                        } else {
-                            bg = theme.Bg
+                var url *url
+                if curr.color {
+                    ansi := itemColors[curr.index]
+                    url = ansi.color.url
+                    origColor := ansiToColorPair(ansi, colMatch)
+                    // hl or hl+ only sets the foreground color, so colMatch is the
+                    // combination of either [hl and bg] or [hl+ and bg+].
+                    //
+                    // If the original text already has background color, and the
+                    // foreground color of colMatch is -1, we shouldn't only apply the
+                    // background color of colMatch.
+                    // e.g. echo -e "\x1b[32;7mfoo\x1b[mbar" | fzf --ansi --color bg+:1,hl+:-1:underline
+                    //      echo -e "\x1b[42mfoo\x1b[mbar" | fzf --ansi --color bg+:1,hl+:-1:underline
+                    if color.Fg().IsDefault() && origColor.HasBg() {
+                        color = origColor
+                        if curr.nth {
+                            color = color.WithAttr(attrNth &^ tui.AttrRegular)
                         }
+                    } else {
+                        color = origColor.MergeNonDefault(color)
                     }
                 }
+                colors = append(colors, colorOffset{
+                    offset: [2]int32{int32(start), int32(idx)}, color: color, match: true, url: url})
+            } else if curr.color {
+                ansi := itemColors[curr.index]
+                base := colBase
+                if curr.nth {
+                    base = base.WithAttr(attrNth)
+                }
+                if hidden {
+                    base = base.WithFg(theme.Nomatch)
+                }
+                color := ansiToColorPair(ansi, base)
                 colors = append(colors, colorOffset{
                     offset: [2]int32{int32(start), int32(idx)},
-                    color:  tui.NewColorPair(fg, bg),
-                    attr:   ansi.color.attr.Merge(attr)})
+                    color:  color,
+                    match:  false,
+                    url:    ansi.color.url})
+            } else {
+                color := colBase.WithAttr(attrNth)
+                if hidden {
+                    color = color.WithFg(theme.Nomatch)
+                }
+                colors = append(colors, colorOffset{
+                    offset: [2]int32{int32(start), int32(idx)},
+                    color:  color,
+                    match:  false,
+                    url:    nil})
             }
         }
     }
@@ -200,7 +306,7 @@ func (a ByOrder) Less(i, j int) bool {
 }

 // ByRelevance is for sorting Items
-type ByRelevance []*Result
+type ByRelevance []Result

 func (a ByRelevance) Len() int {
     return len(a)
@@ -211,11 +317,11 @@ func (a ByRelevance) Swap(i, j int) {
 }

 func (a ByRelevance) Less(i, j int) bool {
-    return compareRanks((*a[i]).rank, (*a[j]).rank, false)
+    return compareRanks(a[i], a[j], false)
 }

 // ByRelevanceTac is for sorting Items
-type ByRelevanceTac []*Result
+type ByRelevanceTac []Result

 func (a ByRelevanceTac) Len() int {
     return len(a)
@@ -226,18 +332,5 @@ func (a ByRelevanceTac) Swap(i, j int) {
 }

 func (a ByRelevanceTac) Less(i, j int) bool {
-    return compareRanks((*a[i]).rank, (*a[j]).rank, true)
+    return compareRanks(a[i], a[j], true)
-}
-
-func compareRanks(irank rank, jrank rank, tac bool) bool {
-    for idx := 0; idx < 4; idx++ {
-        left := irank.points[idx]
-        right := jrank.points[idx]
-        if left < right {
-            return true
-        } else if left > right {
-            return false
-        }
-    }
-    return (irank.index <= jrank.index) != tac
 }

src/result_others.go (new file, +16 lines)
@@ -0,0 +1,16 @@
+//go:build !386 && !amd64
+
+package fzf
+
+func compareRanks(irank Result, jrank Result, tac bool) bool {
+    for idx := 3; idx >= 0; idx-- {
+        left := irank.points[idx]
+        right := jrank.points[idx]
+        if left < right {
+            return true
+        } else if left > right {
+            return false
+        }
+    }
+    return (irank.item.Index() <= jrank.item.Index()) != tac
+}
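
One detail worth noting about the two files above: buildResult in result.go writes each criterion to points[3-idx], so the primary sort key lands in points[3], and compareRanks in result_others.go walks the array from index 3 down to 0 before falling back to the item index. Below is a minimal, self-contained sketch of that ordering; the types are simplified stand-ins, not the actual fzf structs.

package main

import "fmt"

// result mirrors only the fields needed to show the comparison order.
type result struct {
    points [4]uint16 // points[3] is the primary criterion, points[0] the last
    index  int32     // insertion order, used as the final tie-breaker
}

func less(a, b result, tac bool) bool {
    for idx := 3; idx >= 0; idx-- {
        if a.points[idx] != b.points[idx] {
            return a.points[idx] < b.points[idx] // lower is better
        }
    }
    return (a.index <= b.index) != tac
}

func main() {
    a := result{points: [4]uint16{0, 0, 7, 100}, index: 2}
    b := result{points: [4]uint16{0, 0, 3, 200}, index: 1}
    // a wins on the primary criterion even though b has the larger index.
    fmt.Println(less(a, b, false)) // true
}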
@@ -1,5 +1,3 @@
|
|||||||
// +build !tcell
|
|
||||||
|
|
||||||
package fzf
|
package fzf
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@@ -11,10 +9,15 @@ import (
|
|||||||
"github.com/junegunn/fzf/src/util"
|
"github.com/junegunn/fzf/src/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func withIndex(i *Item, index int) *Item {
|
||||||
|
(*i).text.Index = int32(index)
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
|
||||||
func TestOffsetSort(t *testing.T) {
|
func TestOffsetSort(t *testing.T) {
|
||||||
offsets := []Offset{
|
offsets := []Offset{
|
||||||
Offset{3, 5}, Offset{2, 7},
|
{3, 5}, {2, 7},
|
||||||
Offset{1, 3}, Offset{2, 9}}
|
{1, 3}, {2, 9}}
|
||||||
sort.Sort(ByOrder(offsets))
|
sort.Sort(ByOrder(offsets))
|
||||||
|
|
||||||
if offsets[0][0] != 1 || offsets[0][1] != 3 ||
|
if offsets[0][0] != 1 || offsets[0][1] != 3 ||
|
||||||
@@ -26,10 +29,10 @@ func TestOffsetSort(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestRankComparison(t *testing.T) {
|
func TestRankComparison(t *testing.T) {
|
||||||
rank := func(vals ...uint16) rank {
|
rank := func(vals ...uint16) Result {
|
||||||
return rank{
|
return Result{
|
||||||
points: [4]uint16{vals[0], vals[1], vals[2], vals[3]},
|
points: [4]uint16{vals[0], vals[1], vals[2], vals[3]},
|
||||||
index: int32(vals[4])}
|
item: &Item{text: util.Chars{Index: int32(vals[4])}}}
|
||||||
}
|
}
|
||||||
if compareRanks(rank(3, 0, 0, 0, 5), rank(2, 0, 0, 0, 7), false) ||
|
if compareRanks(rank(3, 0, 0, 0, 5), rank(2, 0, 0, 0, 7), false) ||
|
||||||
!compareRanks(rank(3, 0, 0, 0, 5), rank(3, 0, 0, 0, 6), false) ||
|
!compareRanks(rank(3, 0, 0, 0, 5), rank(3, 0, 0, 0, 6), false) ||
|
||||||
@@ -51,37 +54,42 @@ func TestResultRank(t *testing.T) {
|
|||||||
// FIXME global
|
// FIXME global
|
||||||
sortCriteria = []criterion{byScore, byLength}
|
sortCriteria = []criterion{byScore, byLength}
|
||||||
|
|
||||||
strs := [][]rune{[]rune("foo"), []rune("foobar"), []rune("bar"), []rune("baz")}
|
str := []rune("foo")
|
||||||
item1 := buildResult(&Item{text: util.RunesToChars(strs[0]), index: 1}, []Offset{}, 2, 3)
|
item1 := buildResult(
|
||||||
if item1.rank.points[0] != math.MaxUint16-2 || // Bonus
|
withIndex(&Item{text: util.RunesToChars(str)}, 1), []Offset{}, 2)
|
||||||
item1.rank.points[1] != 3 || // Length
|
if item1.points[3] != math.MaxUint16-2 || // Bonus
|
||||||
item1.rank.points[2] != 0 || // Unused
|
item1.points[2] != 3 || // Length
|
||||||
item1.rank.points[3] != 0 || // Unused
|
item1.points[1] != 0 || // Unused
|
||||||
item1.item.index != 1 {
|
item1.points[0] != 0 || // Unused
|
||||||
t.Error(item1.rank)
|
item1.item.Index() != 1 {
|
||||||
|
t.Error(item1)
|
||||||
}
|
}
|
||||||
// Only differ in index
|
// Only differ in index
|
||||||
item2 := buildResult(&Item{text: util.RunesToChars(strs[0])}, []Offset{}, 2, 3)
|
item2 := buildResult(&Item{text: util.RunesToChars(str)}, []Offset{}, 2)
|
||||||
|
|
||||||
items := []*Result{item1, item2}
|
items := []Result{item1, item2}
|
||||||
sort.Sort(ByRelevance(items))
|
sort.Sort(ByRelevance(items))
|
||||||
if items[0] != item2 || items[1] != item1 {
|
if items[0] != item2 || items[1] != item1 {
|
||||||
t.Error(items)
|
t.Error(items)
|
||||||
}
|
}
|
||||||
|
|
||||||
items = []*Result{item2, item1, item1, item2}
|
items = []Result{item2, item1, item1, item2}
|
||||||
sort.Sort(ByRelevance(items))
|
sort.Sort(ByRelevance(items))
|
||||||
if items[0] != item2 || items[1] != item2 ||
|
if items[0] != item2 || items[1] != item2 ||
|
||||||
items[2] != item1 || items[3] != item1 {
|
items[2] != item1 || items[3] != item1 {
|
||||||
t.Error(items, item1, item1.item.index, item2, item2.item.index)
|
t.Error(items, item1, item1.item.Index(), item2, item2.item.Index())
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sort by relevance
|
// Sort by relevance
|
||||||
item3 := buildResult(&Item{index: 2}, []Offset{Offset{1, 3}, Offset{5, 7}}, 3, 0)
|
item3 := buildResult(
|
||||||
item4 := buildResult(&Item{index: 2}, []Offset{Offset{1, 2}, Offset{6, 7}}, 4, 0)
|
withIndex(&Item{}, 2), []Offset{{1, 3}, {5, 7}}, 3)
|
||||||
item5 := buildResult(&Item{index: 2}, []Offset{Offset{1, 3}, Offset{5, 7}}, 5, 0)
|
item4 := buildResult(
|
||||||
item6 := buildResult(&Item{index: 2}, []Offset{Offset{1, 2}, Offset{6, 7}}, 6, 0)
|
withIndex(&Item{}, 2), []Offset{{1, 2}, {6, 7}}, 4)
|
||||||
items = []*Result{item1, item2, item3, item4, item5, item6}
|
item5 := buildResult(
|
||||||
|
withIndex(&Item{}, 2), []Offset{{1, 3}, {5, 7}}, 5)
|
||||||
|
item6 := buildResult(
|
||||||
|
withIndex(&Item{}, 2), []Offset{{1, 2}, {6, 7}}, 6)
|
||||||
|
items = []Result{item1, item2, item3, item4, item5, item6}
|
||||||
sort.Sort(ByRelevance(items))
|
sort.Sort(ByRelevance(items))
|
||||||
if !(items[0] == item6 && items[1] == item5 &&
|
if !(items[0] == item6 && items[1] == item5 &&
|
||||||
items[2] == item4 && items[3] == item3 &&
|
items[2] == item4 && items[3] == item3 &&
|
||||||
@@ -90,37 +98,87 @@ func TestResultRank(t *testing.T) {
	}
}

+func TestChunkTiebreak(t *testing.T) {
+	// FIXME global
+	sortCriteria = []criterion{byScore, byChunk}
+
+	score := 100
+	test := func(input string, offset Offset, chunk string) {
+		item := buildResult(withIndex(&Item{text: util.RunesToChars([]rune(input))}, 1), []Offset{offset}, score)
+		if !(item.points[3] == math.MaxUint16-uint16(score) && item.points[2] == uint16(len(chunk))) {
+			t.Error(item.points)
+		}
+	}
+	test("hello foobar goodbye", Offset{8, 9}, "foobar")
+	test("hello foobar goodbye", Offset{7, 18}, "foobar goodbye")
+	test("hello foobar goodbye", Offset{0, 1}, "hello")
+	test("hello foobar goodbye", Offset{5, 7}, "hello foobar") // TBD
+}
+
func TestColorOffset(t *testing.T) {
	// ------------ 20 ---- -- ----
	// ++++++++ ++++++++++
	// --++++++++-- --++++++++++---

-	offsets := []Offset{Offset{5, 15}, Offset{25, 35}}
+	offsets := []Offset{{5, 15}, {10, 12}, {25, 35}}
	item := Result{
		item: &Item{
			colors: &[]ansiOffset{
-				ansiOffset{[2]int32{0, 20}, ansiState{1, 5, 0}},
-				ansiOffset{[2]int32{22, 27}, ansiState{2, 6, tui.Bold}},
-				ansiOffset{[2]int32{30, 32}, ansiState{3, 7, 0}},
-				ansiOffset{[2]int32{33, 40}, ansiState{4, 8, tui.Bold}}}}}
-	// [{[0 5] 9 false} {[5 15] 99 false} {[15 20] 9 false} {[22 25] 10 true} {[25 35] 99 false} {[35 40] 11 true}]
+				{[2]int32{0, 20}, ansiState{1, 5, 0, -1, nil}},
+				{[2]int32{22, 27}, ansiState{2, 6, tui.Bold, -1, nil}},
+				{[2]int32{30, 32}, ansiState{3, 7, 0, -1, nil}},
+				{[2]int32{33, 40}, ansiState{4, 8, tui.Bold, -1, nil}}}}}

-	pair := tui.NewColorPair(99, 199)
-	colors := item.colorOffsets(offsets, tui.Dark256, pair, tui.AttrRegular, true)
-	assert := func(idx int, b int32, e int32, c tui.ColorPair, bold bool) {
-		var attr tui.Attr
-		if bold {
-			attr = tui.Bold
-		}
+	colBase := tui.NewColorPair(89, 189, tui.AttrUndefined)
+	colMatch := tui.NewColorPair(99, 199, tui.AttrUndefined)
+	colors := item.colorOffsets(offsets, nil, tui.Dark256, colBase, colMatch, tui.AttrUndefined, false)
+	assert := func(idx int, b int32, e int32, c tui.ColorPair) {
		o := colors[idx]
-		if o.offset[0] != b || o.offset[1] != e || o.color != c || o.attr != attr {
-			t.Error(o)
+		if o.offset[0] != b || o.offset[1] != e || o.color != c {
+			t.Error(o, b, e, c)
		}
	}
-	assert(0, 0, 5, tui.NewColorPair(1, 5), false)
-	assert(1, 5, 15, pair, false)
-	assert(2, 15, 20, tui.NewColorPair(1, 5), false)
-	assert(3, 22, 25, tui.NewColorPair(2, 6), true)
-	assert(4, 25, 35, pair, false)
-	assert(5, 35, 40, tui.NewColorPair(4, 8), true)
+	// [{[0 5] {1 5 0}} {[5 15] {99 199 0}} {[15 20] {1 5 0}}
+	//  {[22 25] {2 6 1}} {[25 27] {99 199 1}} {[27 30] {99 199 0}}
+	//  {[30 32] {99 199 0}} {[32 33] {99 199 0}} {[33 35] {99 199 1}}
+	//  {[35 40] {4 8 1}}]
+	assert(0, 0, 5, tui.NewColorPair(1, 5, tui.AttrUndefined))
+	assert(1, 5, 15, colMatch)
+	assert(2, 15, 20, tui.NewColorPair(1, 5, tui.AttrUndefined))
+	assert(3, 22, 25, tui.NewColorPair(2, 6, tui.Bold))
+	assert(4, 25, 27, colMatch.WithAttr(tui.Bold))
+	assert(5, 27, 30, colMatch)
+	assert(6, 30, 32, colMatch)
+	assert(7, 32, 33, colMatch) // TODO: Should we merge consecutive blocks?
+	assert(8, 33, 35, colMatch.WithAttr(tui.Bold))
+	assert(9, 35, 40, tui.NewColorPair(4, 8, tui.Bold))
+
+	colRegular := tui.NewColorPair(-1, -1, tui.AttrUndefined)
+	colUnderline := tui.NewColorPair(-1, -1, tui.Underline)
+	nthOffsets := []Offset{{37, 39}, {42, 45}}
+	for _, attr := range []tui.Attr{tui.AttrRegular, tui.StrikeThrough} {
+		colors = item.colorOffsets(offsets, nthOffsets, tui.Dark256, colRegular, colUnderline, attr, false)
+
+		// [{[0 5] {1 5 0}} {[5 15] {1 5 8}} {[15 20] {1 5 0}}
+		//  {[22 25] {2 6 1}} {[25 27] {2 6 9}} {[27 30] {-1 -1 8}}
+		//  {[30 32] {3 7 8}} {[32 33] {-1 -1 8}} {[33 35] {4 8 9}}
+		//  {[35 37] {4 8 1}} {[37 39] {4 8 x|1}} {[39 40] {4 8 x|1}}]
+		assert(0, 0, 5, tui.NewColorPair(1, 5, tui.AttrUndefined))
+		assert(1, 5, 15, tui.NewColorPair(1, 5, tui.Underline))
+		assert(2, 15, 20, tui.NewColorPair(1, 5, tui.AttrUndefined))
+		assert(3, 22, 25, tui.NewColorPair(2, 6, tui.Bold))
+		assert(4, 25, 27, tui.NewColorPair(2, 6, tui.Bold|tui.Underline))
+		assert(5, 27, 30, colUnderline)
+		assert(6, 30, 32, tui.NewColorPair(3, 7, tui.Underline))
+		assert(7, 32, 33, colUnderline)
+		assert(8, 33, 35, tui.NewColorPair(4, 8, tui.Bold|tui.Underline))
+		assert(9, 35, 37, tui.NewColorPair(4, 8, tui.Bold))
+		expected := tui.Bold | attr
+		if attr == tui.AttrRegular {
+			expected = tui.Bold
+		}
+		assert(10, 37, 39, tui.NewColorPair(4, 8, expected))
+		assert(11, 39, 40, tui.NewColorPair(4, 8, tui.Bold))
+	}
}
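For context, sort.Sort(ByRelevance(items)) in the hunk above implies that ByRelevance satisfies sort.Interface over the new []Result slice type. A minimal sketch of such a wrapper, assuming the Result and compareRanks definitions appearing elsewhere in this diff (not necessarily fzf's exact code):

// ByRelevance orders results best match first by their packed rank.
type ByRelevance []Result

func (a ByRelevance) Len() int      { return len(a) }
func (a ByRelevance) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a ByRelevance) Less(i, j int) bool {
	// compareRanks is the helper defined in src/result_x86.go below;
	// tac=false keeps the natural order on ties.
	return compareRanks(a[i], a[j], false)
}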
src/result_x86.go (new file, 16 lines)

//go:build 386 || amd64

package fzf

import "unsafe"

func compareRanks(irank Result, jrank Result, tac bool) bool {
	left := *(*uint64)(unsafe.Pointer(&irank.points[0]))
	right := *(*uint64)(unsafe.Pointer(&jrank.points[0]))
	if left < right {
		return true
	} else if left > right {
		return false
	}
	return (irank.item.Index() <= jrank.item.Index()) != tac
}
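The unsafe cast in compareRanks works because the four uint16 rank components sit contiguously in memory, and the build tag restricts the file to little-endian x86, where points[3] (the score slot, per TestChunkTiebreak above) lands in the most significant bits of the combined word. A rough illustration of the packing, not part of the diff:

// packedRank shows the value compareRanks effectively compares on 386/amd64:
// points[3] is most significant, then points[2], points[1], points[0].
func packedRank(points [4]uint16) uint64 {
	return uint64(points[0]) |
		uint64(points[1])<<16 |
		uint64(points[2])<<32 |
		uint64(points[3])<<48
}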
src/server.go (new file, 270 lines)

package fzf

import (
	"bufio"
	"bytes"
	"crypto/subtle"
	"errors"
	"fmt"
	"net"
	"os"
	"regexp"
	"strconv"
	"strings"
	"time"
)

var getRegex *regexp.Regexp

func init() {
	getRegex = regexp.MustCompile(`^GET /(?:\?([a-z0-9=&]+))? HTTP`)
}

type getParams struct {
	limit  int
	offset int
}

const (
	crlf             = "\r\n"
	httpOk           = "HTTP/1.1 200 OK" + crlf
	httpBadRequest   = "HTTP/1.1 400 Bad Request" + crlf
	httpUnauthorized = "HTTP/1.1 401 Unauthorized" + crlf
	httpUnavailable  = "HTTP/1.1 503 Service Unavailable" + crlf
	httpReadTimeout  = 10 * time.Second
	channelTimeout   = 2 * time.Second
	jsonContentType  = "Content-Type: application/json" + crlf
	maxContentLength = 1024 * 1024
)

type httpServer struct {
	apiKey        []byte
	actionChannel chan []*action
	getHandler    func(getParams) string
}

type listenAddress struct {
	host string
	port int
	sock string
}

func (addr listenAddress) IsLocal() bool {
	return addr.host == "localhost" || addr.host == "127.0.0.1" || len(addr.sock) > 0
}

var defaultListenAddr = listenAddress{"localhost", 0, ""}

func parseListenAddress(address string) (listenAddress, error) {
	if strings.HasSuffix(address, ".sock") {
		return listenAddress{"", 0, address}, nil
	}

	parts := strings.SplitN(address, ":", 3)
	if len(parts) == 1 {
		parts = []string{"localhost", parts[0]}
	}
	if len(parts) != 2 {
		return defaultListenAddr, fmt.Errorf("invalid listen address: %s", address)
	}
	portStr := parts[len(parts)-1]
	port, err := strconv.Atoi(portStr)
	if err != nil || port < 0 || port > 65535 {
		return defaultListenAddr, fmt.Errorf("invalid listen port: %s", portStr)
	}
	if len(parts[0]) == 0 {
		parts[0] = "localhost"
	}
	return listenAddress{parts[0], port, ""}, nil
}

func startHttpServer(address listenAddress, actionChannel chan []*action, getHandler func(getParams) string) (net.Listener, int, error) {
	host := address.host
	port := address.port
	apiKey := os.Getenv("FZF_API_KEY")
	if !address.IsLocal() && len(apiKey) == 0 {
		return nil, port, errors.New("FZF_API_KEY is required to allow remote access")
	}

	var listener net.Listener
	var err error
	if len(address.sock) > 0 {
		if _, err := os.Stat(address.sock); err == nil {
			// Check if the socket is already in use
			if conn, err := net.Dial("unix", address.sock); err == nil {
				conn.Close()
				return nil, 0, fmt.Errorf("socket already in use: %s", address.sock)
			}
			os.Remove(address.sock)
		}
		listener, err = net.Listen("unix", address.sock)
		if err != nil {
			return nil, 0, fmt.Errorf("failed to listen on %s", address.sock)
		}
		os.Chmod(address.sock, 0600)
	} else {
		addrStr := fmt.Sprintf("%s:%d", host, port)
		listener, err = net.Listen("tcp", addrStr)
		if err != nil {
			return nil, port, fmt.Errorf("failed to listen on %s", addrStr)
		}
		if port == 0 {
			addr := listener.Addr().String()
			parts := strings.Split(addr, ":")
			if len(parts) < 2 {
				return nil, port, fmt.Errorf("cannot extract port: %s", addr)
			}
			var err error
			port, err = strconv.Atoi(parts[len(parts)-1])
			if err != nil {
				return nil, port, err
			}
		}
	}

	server := httpServer{
		apiKey:        []byte(apiKey),
		actionChannel: actionChannel,
		getHandler:    getHandler,
	}

	go func() {
		for {
			conn, err := listener.Accept()
			if err != nil {
				if errors.Is(err, net.ErrClosed) {
					return
				}
				continue
			}
			conn.Write([]byte(server.handleHttpRequest(conn)))
			conn.Close()
		}
	}()

	return listener, port, nil
}

// Here we are writing a simplistic HTTP server without using net/http
// package to reduce the size of the binary.
//
// * No --listen: 2.8MB
// * --listen with net/http: 5.7MB
// * --listen w/o net/http: 3.3MB
func (server *httpServer) handleHttpRequest(conn net.Conn) string {
	contentLength := 0
	apiKey := ""
	body := ""
	answer := func(code string, message string) string {
		message += "\n"
		return code + fmt.Sprintf("Content-Length: %d%s", len(message), crlf+crlf+message)
	}
	unauthorized := func(message string) string {
		return answer(httpUnauthorized, message)
	}
	bad := func(message string) string {
		return answer(httpBadRequest, message)
	}
	good := func(message string) string {
		return answer(httpOk+jsonContentType, message)
	}
	conn.SetReadDeadline(time.Now().Add(httpReadTimeout))
	scanner := bufio.NewScanner(conn)
	scanner.Split(func(data []byte, atEOF bool) (int, []byte, error) {
		found := bytes.Index(data, []byte(crlf))
		if found >= 0 {
			token := data[:found+len(crlf)]
			return len(token), token, nil
		}
		if atEOF || len(body)+len(data) >= contentLength {
			return 0, data, bufio.ErrFinalToken
		}
		return 0, nil, nil
	})

	section := 0
	for scanner.Scan() {
		text := scanner.Text()
		switch section {
		case 0:
			getMatch := getRegex.FindStringSubmatch(text)
			if len(getMatch) > 0 {
				response := server.getHandler(parseGetParams(getMatch[1]))
				if len(response) > 0 {
					return good(response)
				}
				return answer(httpUnavailable+jsonContentType, `{"error":"timeout"}`)
			} else if !strings.HasPrefix(text, "POST / HTTP") {
				return bad("invalid request method")
			}
			section++
		case 1:
			if text == crlf {
				if contentLength == 0 {
					return bad("content-length header missing")
				}
				section++
				continue
			}
			pair := strings.SplitN(text, ":", 2)
			if len(pair) == 2 {
				switch strings.ToLower(pair[0]) {
				case "content-length":
					length, err := strconv.Atoi(strings.TrimSpace(pair[1]))
					if err != nil || length <= 0 || length > maxContentLength {
						return bad("invalid content length")
					}
					contentLength = length
				case "x-api-key":
					apiKey = strings.TrimSpace(pair[1])
				}
			}
		case 2:
			body += text
		}
	}

	if len(server.apiKey) != 0 && subtle.ConstantTimeCompare([]byte(apiKey), server.apiKey) != 1 {
		return unauthorized("invalid api key")
	}

	if len(body) < contentLength {
		return bad("incomplete request")
	}
	body = body[:contentLength]

	actions, err := parseSingleActionList(strings.Trim(string(body), "\r\n"))
	if err != nil {
		return bad(err.Error())
	}
	if len(actions) == 0 {
		return bad("no action specified")
	}

	select {
	case server.actionChannel <- actions:
	case <-time.After(channelTimeout):
		return httpUnavailable + crlf
	}
	return httpOk + crlf
}

func parseGetParams(query string) getParams {
	params := getParams{limit: 100, offset: 0}
	for _, pair := range strings.Split(query, "&") {
		parts := strings.SplitN(pair, "=", 2)
		if len(parts) == 2 {
			switch parts[0] {
			case "limit", "offset":
				if val, err := strconv.Atoi(parts[1]); err == nil {
					if parts[0] == "limit" {
						params.limit = val
					} else {
						params.offset = val
					}
				}
			}
		}
	}
	return params
}
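For context, this is the server behind fzf's --listen option: a request is a plain POST whose body is an action list, optionally authenticated with an x-api-key header, while a GET returns JSON subject to the limit/offset query parameters. A minimal client sketch using the standard net/http package (the port, the action string, and the use of FZF_API_KEY are illustrative assumptions, not taken from this diff):

package main

import (
	"net/http"
	"os"
	"strings"
)

func main() {
	// Assumes an fzf instance started with --listen on port 6266.
	req, _ := http.NewRequest("POST", "http://localhost:6266",
		strings.NewReader("change-query(hello)+first"))
	// Only needed when the server was started with FZF_API_KEY set.
	req.Header.Set("x-api-key", os.Getenv("FZF_API_KEY"))
	if resp, err := http.DefaultClient.Do(req); err == nil {
		resp.Body.Close()
	}
}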
src/terminal.go (7297 lines changed): file diff suppressed because it is too large.
@@ -1,25 +1,43 @@
package fzf

import (
+	"bytes"
+	"io"
+	"os"
	"regexp"
+	"strings"
	"testing"
+	"text/template"

	"github.com/junegunn/fzf/src/util"
)

-func newItem(str string) *Item {
-	bytes := []byte(str)
-	trimmed, _, _ := extractColor(str, nil, nil)
-	return &Item{origText: &bytes, text: util.RunesToChars([]rune(trimmed))}
+func replacePlaceholderTest(template string, stripAnsi bool, delimiter Delimiter, printsep string, forcePlus bool, query string, allItems [3][]*Item) string {
+	replaced, _ := replacePlaceholder(replacePlaceholderParams{
+		template:   template,
+		stripAnsi:  stripAnsi,
+		delimiter:  delimiter,
+		printsep:   printsep,
+		forcePlus:  forcePlus,
+		query:      query,
+		allItems:   allItems,
+		lastAction: actBackwardDeleteCharEof,
+		prompt:     "prompt",
+		executor:   util.NewExecutor(""),
+	})
+	return replaced
}

func TestReplacePlaceholder(t *testing.T) {
	item1 := newItem(" foo'bar \x1b[31mbaz\x1b[m")
-	items1 := []*Item{item1, item1}
-	items2 := []*Item{
-		newItem("foo'bar \x1b[31mbaz\x1b[m"),
-		newItem("foo'bar \x1b[31mbaz\x1b[m"),
-		newItem("FOO'BAR \x1b[31mBAZ\x1b[m")}
+	items1 := [3][]*Item{{item1}, {item1}, nil}
+	items2 := [3][]*Item{
+		{newItem("foo'bar \x1b[31mbaz\x1b[m")},
+		{newItem("foo'bar \x1b[31mbaz\x1b[m"),
+			newItem("FOO'BAR \x1b[31mBAZ\x1b[m")}, nil}
+
+	delim := "'"
+	var regex *regexp.Regexp

	var result string
	check := func(expected string) {
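The hunk below replaces literal quoted expectations with templated ones: a checkFormat helper renders strings such as "echo {{.O}}foo{{.I}}bar{{.O}}" through text/template with a per-platform quotes struct, so one expectation covers both Unix and Windows quoting. A standalone sketch of that rendering step, using the unixStyle values shown in the hunk (illustration only, not part of the diff):

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	type quotes struct{ O, I, S string } // outer quote, inner quote, separator
	unixStyle := quotes{`'`, `'\''`, "\n"}
	bb := &bytes.Buffer{}
	template.Must(template.New("").Parse("echo {{.O}}foo{{.I}}bar{{.O}}")).Execute(bb, unixStyle)
	fmt.Println(bb.String()) // echo 'foo'\''bar'
}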
@@ -27,67 +45,657 @@ func TestReplacePlaceholder(t *testing.T) {
			t.Errorf("expected: %s, actual: %s", expected, result)
		}
	}
+	// helper function that converts template format into string and carries out the check()
+	checkFormat := func(format string) {
+		type quotes struct{ O, I, S string } // outer, inner quotes, print separator
+		unixStyle := quotes{`'`, `'\''`, "\n"}
+		windowsStyle := quotes{`^"`, `'`, "\n"}
+		var effectiveStyle quotes
+
+		if util.IsWindows() {
+			effectiveStyle = windowsStyle
+		} else {
+			effectiveStyle = unixStyle
+		}
+
+		expected := templateToString(format, effectiveStyle)
+		check(expected)
+	}
+	printsep := "\n"
+
+	/*
+		Test multiple placeholders and the function parameters.
+	*/
	// {}, preserve ansi
-	result = replacePlaceholder("echo {}", false, Delimiter{}, false, "query", items1)
-	check("echo ' foo'\\''bar \x1b[31mbaz\x1b[m'")
+	result = replacePlaceholderTest("echo {}", false, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.I}}bar \x1b[31mbaz\x1b[m{{.O}}")

	// {}, strip ansi
-	result = replacePlaceholder("echo {}", true, Delimiter{}, false, "query", items1)
-	check("echo ' foo'\\''bar baz'")
+	result = replacePlaceholderTest("echo {}", true, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}")
+
+	// {r}, strip ansi
+	result = replacePlaceholderTest("echo {r}", true, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo foo'bar baz")
+
+	// {r..}, strip ansi
+	result = replacePlaceholderTest("echo {r..}", true, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo foo'bar baz")

	// {}, with multiple items
-	result = replacePlaceholder("echo {}", true, Delimiter{}, false, "query", items2)
-	check("echo 'foo'\\''bar baz'")
+	result = replacePlaceholderTest("echo {}", true, Delimiter{}, printsep, false, "query", items2)
+	checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}}")

	// {..}, strip leading whitespaces, preserve ansi
-	result = replacePlaceholder("echo {..}", false, Delimiter{}, false, "query", items1)
-	check("echo 'foo'\\''bar \x1b[31mbaz\x1b[m'")
+	result = replacePlaceholderTest("echo {..}", false, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}}foo{{.I}}bar \x1b[31mbaz\x1b[m{{.O}}")

	// {..}, strip leading whitespaces, strip ansi
-	result = replacePlaceholder("echo {..}", true, Delimiter{}, false, "query", items1)
-	check("echo 'foo'\\''bar baz'")
+	result = replacePlaceholderTest("echo {..}", true, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}}")

	// {q}
-	result = replacePlaceholder("echo {} {q}", true, Delimiter{}, false, "query", items1)
-	check("echo ' foo'\\''bar baz' 'query'")
+	result = replacePlaceholderTest("echo {} {q}", true, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}} {{.O}}query{{.O}}")

	// {q}, multiple items
-	result = replacePlaceholder("echo {+}{q}{+}", true, Delimiter{}, false, "query 'string'", items2)
-	check("echo 'foo'\\''bar baz' 'FOO'\\''BAR BAZ''query '\\''string'\\''''foo'\\''bar baz' 'FOO'\\''BAR BAZ'")
+	result = replacePlaceholderTest("echo {+}{q}{+}", true, Delimiter{}, printsep, false, "query 'string'", items2)
+	checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}{{.O}}query {{.I}}string{{.I}}{{.O}}{{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}")

-	result = replacePlaceholder("echo {}{q}{}", true, Delimiter{}, false, "query 'string'", items2)
-	check("echo 'foo'\\''bar baz''query '\\''string'\\''''foo'\\''bar baz'")
+	result = replacePlaceholderTest("echo {}{q}{}", true, Delimiter{}, printsep, false, "query 'string'", items2)
+	checkFormat("echo {{.O}}foo{{.I}}bar baz{{.O}}{{.O}}query {{.I}}string{{.I}}{{.O}}{{.O}}foo{{.I}}bar baz{{.O}}")

-	result = replacePlaceholder("echo {1}/{2}/{2,1}/{-1}/{-2}/{}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, false, "query", items1)
-	check("echo 'foo'\\''bar'/'baz'/'bazfoo'\\''bar'/'baz'/'foo'\\''bar'/' foo'\\''bar baz'/'foo'\\''bar baz'/{n.t}/{}/{1}/{q}/''")
+	result = replacePlaceholderTest("echo {1}/{2}/{2,1}/{-1}/{-2}/{}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}}foo{{.I}}bar{{.O}}/{{.O}}baz{{.O}}/{{.O}}bazfoo{{.I}}bar{{.O}}/{{.O}}baz{{.O}}/{{.O}}foo{{.I}}bar{{.O}}/{{.O}} foo{{.I}}bar baz{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}}")

-	result = replacePlaceholder("echo {1}/{2}/{-1}/{-2}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, false, "query", items2)
-	check("echo 'foo'\\''bar'/'baz'/'baz'/'foo'\\''bar'/'foo'\\''bar baz'/{n.t}/{}/{1}/{q}/''")
+	result = replacePlaceholderTest("echo {1}/{2}/{-1}/{-2}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, printsep, false, "query", items2)
+	checkFormat("echo {{.O}}foo{{.I}}bar{{.O}}/{{.O}}baz{{.O}}/{{.O}}baz{{.O}}/{{.O}}foo{{.I}}bar{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}}")

-	result = replacePlaceholder("echo {+1}/{+2}/{+-1}/{+-2}/{+..}/{n.t}/\\{}/\\{1}/\\{q}/{+3}", true, Delimiter{}, false, "query", items2)
-	check("echo 'foo'\\''bar' 'FOO'\\''BAR'/'baz' 'BAZ'/'baz' 'BAZ'/'foo'\\''bar' 'FOO'\\''BAR'/'foo'\\''bar baz' 'FOO'\\''BAR BAZ'/{n.t}/{}/{1}/{q}/'' ''")
+	result = replacePlaceholderTest("echo {+1}/{+2}/{+-1}/{+-2}/{+..}/{n.t}/\\{}/\\{1}/\\{q}/{+3}", true, Delimiter{}, printsep, false, "query", items2)
+	checkFormat("echo {{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}} {{.O}}{{.O}}")

	// forcePlus
-	result = replacePlaceholder("echo {1}/{2}/{-1}/{-2}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, true, "query", items2)
-	check("echo 'foo'\\''bar' 'FOO'\\''BAR'/'baz' 'BAZ'/'baz' 'BAZ'/'foo'\\''bar' 'FOO'\\''BAR'/'foo'\\''bar baz' 'FOO'\\''BAR BAZ'/{n.t}/{}/{1}/{q}/'' ''")
+	result = replacePlaceholderTest("echo {1}/{2}/{-1}/{-2}/{..}/{n.t}/\\{}/\\{1}/\\{q}/{3}", true, Delimiter{}, printsep, true, "query", items2)
+	checkFormat("echo {{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}baz{{.O}} {{.O}}BAZ{{.O}}/{{.O}}foo{{.I}}bar{{.O}} {{.O}}FOO{{.I}}BAR{{.O}}/{{.O}}foo{{.I}}bar baz{{.O}} {{.O}}FOO{{.I}}BAR BAZ{{.O}}/{n.t}/{}/{1}/{q}/{{.O}}{{.O}} {{.O}}{{.O}}")
+
+	// Whitespace preserving flag with "'" delimiter
+	result = replacePlaceholderTest("echo {s1}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.O}}")
+
+	result = replacePlaceholderTest("echo {s2}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}}bar baz{{.O}}")
+
+	result = replacePlaceholderTest("echo {s}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}")
+
+	result = replacePlaceholderTest("echo {s..}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}")
+
+	// Whitespace preserving flag with regex delimiter
+	regex = regexp.MustCompile(`\w+`)
+
+	result = replacePlaceholderTest("echo {s1}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} {{.O}}")
+
+	result = replacePlaceholderTest("echo {s2}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}}{{.I}}{{.O}}")
+
+	result = replacePlaceholderTest("echo {s3}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} {{.O}}")

	// No match
-	result = replacePlaceholder("echo {}/{+}", true, Delimiter{}, false, "query", []*Item{nil, nil})
+	result = replacePlaceholderTest("echo {}/{+}", true, Delimiter{}, printsep, false, "query", [3][]*Item{nil, nil, nil})
	check("echo /")

	// No match, but with selections
-	result = replacePlaceholder("echo {}/{+}", true, Delimiter{}, false, "query", []*Item{nil, item1})
-	check("echo /' foo'\\''bar baz'")
+	result = replacePlaceholderTest("echo {}/{+}", true, Delimiter{}, printsep, false, "query", [3][]*Item{nil, {item1}, nil})
+	checkFormat("echo /{{.O}} foo{{.I}}bar baz{{.O}}")

	// String delimiter
-	delim := "'"
-	result = replacePlaceholder("echo {}/{1}/{2}", true, Delimiter{str: &delim}, false, "query", items1)
-	check("echo ' foo'\\''bar baz'/'foo'/'bar baz'")
+	result = replacePlaceholderTest("echo {}/{1}/{2}", true, Delimiter{str: &delim}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}/{{.O}}foo{{.O}}/{{.O}}bar baz{{.O}}")

	// Regex delimiter
-	regex := regexp.MustCompile("[oa]+")
+	regex = regexp.MustCompile("[oa]+")
	// foo'bar baz
-	result = replacePlaceholder("echo {}/{1}/{3}/{2..3}", true, Delimiter{regex: regex}, false, "query", items1)
-	check("echo ' foo'\\''bar baz'/'f'/'r b'/''\\''bar b'")
+	result = replacePlaceholderTest("echo {}/{1}/{3}/{2..3}", true, Delimiter{regex: regex}, printsep, false, "query", items1)
+	checkFormat("echo {{.O}} foo{{.I}}bar baz{{.O}}/{{.O}}f{{.O}}/{{.O}}r b{{.O}}/{{.O}}{{.I}}bar b{{.O}}")
+
+	/*
+		Test single placeholders, but focus on the placeholders' parameters (e.g. flags).
+		see: TestParsePlaceholder
+	*/
+	items3 := [3][]*Item{
+		// single line
+		{newItem("1a 1b 1c 1d 1e 1f")},
+		// multi line
+		{newItem("1a 1b 1c 1d 1e 1f"),
+			newItem("2a 2b 2c 2d 2e 2f"),
+			newItem("3a 3b 3c 3d 3e 3f"),
+			newItem("4a 4b 4c 4d 4e 4f"),
+			newItem("5a 5b 5c 5d 5e 5f"),
+			newItem("6a 6b 6c 6d 6e 6f"),
+			newItem("7a 7b 7c 7d 7e 7f")},
+		nil,
+	}
+	stripAnsi := false
+	forcePlus := false
+	query := "sample query"
+
+	templateToOutput := make(map[string]string)
+	templateToFile := make(map[string]string) // same as above, but the file contents will be matched
+	// I. item type placeholder
+	templateToOutput[`{}`] = `{{.O}}1a 1b 1c 1d 1e 1f{{.O}}`
+	templateToOutput[`{+}`] = `{{.O}}1a 1b 1c 1d 1e 1f{{.O}} {{.O}}2a 2b 2c 2d 2e 2f{{.O}} {{.O}}3a 3b 3c 3d 3e 3f{{.O}} {{.O}}4a 4b 4c 4d 4e 4f{{.O}} {{.O}}5a 5b 5c 5d 5e 5f{{.O}} {{.O}}6a 6b 6c 6d 6e 6f{{.O}} {{.O}}7a 7b 7c 7d 7e 7f{{.O}}`
+	templateToOutput[`{n}`] = `0`
+	templateToOutput[`{+n}`] = `0 0 0 0 0 0 0`
+	templateToFile[`{f}`] = `1a 1b 1c 1d 1e 1f{{.S}}`
+	templateToFile[`{+f}`] = `1a 1b 1c 1d 1e 1f{{.S}}2a 2b 2c 2d 2e 2f{{.S}}3a 3b 3c 3d 3e 3f{{.S}}4a 4b 4c 4d 4e 4f{{.S}}5a 5b 5c 5d 5e 5f{{.S}}6a 6b 6c 6d 6e 6f{{.S}}7a 7b 7c 7d 7e 7f{{.S}}`
+	templateToFile[`{nf}`] = `0{{.S}}`
+	templateToFile[`{+nf}`] = `0{{.S}}0{{.S}}0{{.S}}0{{.S}}0{{.S}}0{{.S}}0{{.S}}`
+
+	// II. token type placeholders
+	templateToOutput[`{..}`] = templateToOutput[`{}`]
+	templateToOutput[`{1..}`] = templateToOutput[`{}`]
+	templateToOutput[`{..2}`] = `{{.O}}1a 1b{{.O}}`
+	templateToOutput[`{1..2}`] = templateToOutput[`{..2}`]
+	templateToOutput[`{-2..-1}`] = `{{.O}}1e 1f{{.O}}`
+	// shorthand for x..x range
+	templateToOutput[`{1}`] = `{{.O}}1a{{.O}}`
+	templateToOutput[`{1..1}`] = templateToOutput[`{1}`]
+	templateToOutput[`{-6}`] = templateToOutput[`{1}`]
+	// multiple ranges
+	templateToOutput[`{1,2}`] = templateToOutput[`{1..2}`]
+	templateToOutput[`{1,2,4}`] = `{{.O}}1a 1b 1d{{.O}}`
+	templateToOutput[`{1,2..4}`] = `{{.O}}1a 1b 1c 1d{{.O}}`
+	templateToOutput[`{1..2,-4..-3}`] = `{{.O}}1a 1b 1c 1d{{.O}}`
+	// flags
+	templateToOutput[`{+1}`] = `{{.O}}1a{{.O}} {{.O}}2a{{.O}} {{.O}}3a{{.O}} {{.O}}4a{{.O}} {{.O}}5a{{.O}} {{.O}}6a{{.O}} {{.O}}7a{{.O}}`
+	templateToOutput[`{+-1}`] = `{{.O}}1f{{.O}} {{.O}}2f{{.O}} {{.O}}3f{{.O}} {{.O}}4f{{.O}} {{.O}}5f{{.O}} {{.O}}6f{{.O}} {{.O}}7f{{.O}}`
+	templateToOutput[`{s1}`] = `{{.O}}1a {{.O}}`
+	templateToFile[`{f1}`] = `1a{{.S}}`
+	templateToOutput[`{+s1..2}`] = `{{.O}}1a 1b {{.O}} {{.O}}2a 2b {{.O}} {{.O}}3a 3b {{.O}} {{.O}}4a 4b {{.O}} {{.O}}5a 5b {{.O}} {{.O}}6a 6b {{.O}} {{.O}}7a 7b {{.O}}`
+	templateToFile[`{+sf1..2}`] = `1a 1b {{.S}}2a 2b {{.S}}3a 3b {{.S}}4a 4b {{.S}}5a 5b {{.S}}6a 6b {{.S}}7a 7b {{.S}}`
+
+	// III. query type placeholder
+	// query flag is not removed after parsing, so it gets doubled
+	// while the double q is invalid, it is useful here for testing purposes
+	templateToOutput[`{q}`] = "{{.O}}" + query + "{{.O}}"
+	templateToOutput[`{fzf:query}`] = "{{.O}}" + query + "{{.O}}"
+	templateToOutput[`{fzf:action} {fzf:prompt}`] = "backward-delete-char-eof 'prompt'"
+
+	// IV. escaping placeholder
+	templateToOutput[`\{}`] = `{}`
+	templateToOutput[`\{q}`] = `{q}`
+	templateToOutput[`\{fzf:query}`] = `{fzf:query}`
+	templateToOutput[`\{fzf:action}`] = `{fzf:action}`
+	templateToOutput[`\{++}`] = `{++}`
+	templateToOutput[`{++}`] = templateToOutput[`{+}`]
+
+	for giveTemplate, wantOutput := range templateToOutput {
+		result = replacePlaceholderTest(giveTemplate, stripAnsi, Delimiter{}, printsep, forcePlus, query, items3)
+		checkFormat(wantOutput)
+	}
+	for giveTemplate, wantOutput := range templateToFile {
+		path := replacePlaceholderTest(giveTemplate, stripAnsi, Delimiter{}, printsep, forcePlus, query, items3)
+
+		data, err := readFile(path)
+		if err != nil {
+			t.Errorf("Cannot read the content of the temp file %s.", path)
+		}
+		result = string(data)
+
+		checkFormat(wantOutput)
+	}
+}
+
+func TestQuoteEntry(t *testing.T) {
+	type quotes struct{ E, O, SQ, DQ, BS string } // standalone escape, outer, single and double quotes, backslash
+	unixStyle := quotes{``, `'`, `'\''`, `"`, `\`}
+	windowsStyle := quotes{`^`, `^"`, `'`, `\^"`, `\\`}
+	var effectiveStyle quotes
+	exec := util.NewExecutor("")
+
+	if util.IsWindows() {
+		effectiveStyle = windowsStyle
+	} else {
+		effectiveStyle = unixStyle
+	}
+
+	tests := map[string]string{
+		`'`:     `{{.O}}{{.SQ}}{{.O}}`,
+		`"`:     `{{.O}}{{.DQ}}{{.O}}`,
+		`\`:     `{{.O}}{{.BS}}{{.O}}`,
+		`\"`:    `{{.O}}{{.BS}}{{.DQ}}{{.O}}`,
+		`"\\\"`: `{{.O}}{{.DQ}}{{.BS}}{{.BS}}{{.BS}}{{.DQ}}{{.O}}`,
+
+		`$`:       `{{.O}}${{.O}}`,
+		`$HOME`:   `{{.O}}$HOME{{.O}}`,
+		`'$HOME'`: `{{.O}}{{.SQ}}$HOME{{.SQ}}{{.O}}`,
+
+		`&`: `{{.O}}{{.E}}&{{.O}}`,
+		`|`: `{{.O}}{{.E}}|{{.O}}`,
+		`<`: `{{.O}}{{.E}}<{{.O}}`,
+		`>`: `{{.O}}{{.E}}>{{.O}}`,
+		`(`: `{{.O}}{{.E}}({{.O}}`,
+		`)`: `{{.O}}{{.E}}){{.O}}`,
+		`@`: `{{.O}}{{.E}}@{{.O}}`,
+		`^`: `{{.O}}{{.E}}^{{.O}}`,
+		`%`: `{{.O}}{{.E}}%{{.O}}`,
+		`!`: `{{.O}}{{.E}}!{{.O}}`,
+		`%USERPROFILE%`:           `{{.O}}{{.E}}%USERPROFILE{{.E}}%{{.O}}`,
+		`C:\Program Files (x86)\`: `{{.O}}C:{{.BS}}Program Files {{.E}}(x86{{.E}}){{.BS}}{{.O}}`,
+		`"C:\Program Files"`:      `{{.O}}{{.DQ}}C:{{.BS}}Program Files{{.DQ}}{{.O}}`,
+	}
+
+	for input, expected := range tests {
+		escaped := exec.QuoteEntry(input)
+		expected = templateToString(expected, effectiveStyle)
+		if escaped != expected {
+			t.Errorf("Input: %s, expected: %s, actual %s", input, expected, escaped)
+		}
+	}
+}
+
+// purpose of this test is to demonstrate some shortcomings of fzf's templating system on Unix
+func TestUnixCommands(t *testing.T) {
+	if util.IsWindows() {
+		t.SkipNow()
+	}
+	tests := []testCase{
+		// reference: give{template, query, items}, want{output OR match}
+
+		// 1) working examples
+
+		// paths that does not have to evaluated will work fine, when quoted
+		{give{`grep foo {}`, ``, newItems(`test`)}, want{output: `grep foo 'test'`}},
+		{give{`grep foo {}`, ``, newItems(`/home/user/test`)}, want{output: `grep foo '/home/user/test'`}},
+		{give{`grep foo {}`, ``, newItems(`./test`)}, want{output: `grep foo './test'`}},
+
+		// only placeholders are escaped as data, this will lookup tilde character in a test file in your home directory
+		// quoting the tilde is required (to be treated as string)
+		{give{`grep {} ~/test`, ``, newItems(`~`)}, want{output: `grep '~' ~/test`}},
+
+		// 2) problematic examples
+		// (not necessarily unexpected)
+
+		// paths that need to expand some part of it won't work (special characters and variables)
+		{give{`cat {}`, ``, newItems(`~/test`)}, want{output: `cat '~/test'`}},
+		{give{`cat {}`, ``, newItems(`$HOME/test`)}, want{output: `cat '$HOME/test'`}},
+	}
+	testCommands(t, tests)
+}
+
+// purpose of this test is to demonstrate some shortcomings of fzf's templating system on Windows
+func TestWindowsCommands(t *testing.T) {
+	// XXX Deprecated
+	t.SkipNow()
+
+	tests := []testCase{
+		// reference: give{template, query, items}, want{output OR match}
+
+		// 1) working examples
+
+		// example of redundantly escaped backslash in the output, besides looking bit ugly, it won't cause any issue
+		{give{`type {}`, ``, newItems(`C:\test.txt`)}, want{output: `type ^"C:\\test.txt^"`}},
+		{give{`rg -- "package" {}`, ``, newItems(`.\test.go`)}, want{output: `rg -- "package" ^".\\test.go^"`}},
+		// example of mandatorily escaped backslash in the output, otherwise `rg -- "C:\test.txt"` is matching for tabulator
+		{give{`rg -- {}`, ``, newItems(`C:\test.txt`)}, want{output: `rg -- ^"C:\\test.txt^"`}},
+		// example of mandatorily escaped double quote in the output, otherwise `rg -- ""C:\\test.txt""` is not matching for the double quotes around the path
+		{give{`rg -- {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `rg -- ^"\^"C:\\test.txt\^"^"`}},

+		// 2) problematic examples
+		// (not necessarily unexpected)
+
+		// notepad++'s parser can't handle `-n"12"` generate by fzf, expects `-n12`
+		{give{`notepad++ -n{1} {2}`, ``, newItems(`12 C:\Work\Test Folder\File.txt`)}, want{output: `notepad++ -n^"12^" ^"C:\\Work\\Test Folder\\File.txt^"`}},
+
+		// cat is parsing `\"` as a part of the file path, double quote is illegal character for paths on Windows
+		// cat: "C:\\test.txt: Invalid argument
+		{give{`cat {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `cat ^"\^"C:\\test.txt\^"^"`}},
+		// cat: "C:\\test.txt": Invalid argument
+		{give{`cmd /c {}`, ``, newItems(`cat "C:\test.txt"`)}, want{output: `cmd /c ^"cat \^"C:\\test.txt\^"^"`}},
+
+		// the "file" flag in the pattern won't create *.bat or *.cmd file so the command in the output tries to edit the file, instead of executing it
+		// the temp file contains: `cat "C:\test.txt"`
+		// TODO this should actually work
+		{give{`cmd /c {f}`, ``, newItems(`cat "C:\test.txt"`)}, want{match: `^cmd /c .*\fzf-preview-[0-9]{9}$`}},
+	}
+	testCommands(t, tests)
+}
+
+// purpose of this test is to demonstrate some shortcomings of fzf's templating system on Windows in Powershell
+func TestPowershellCommands(t *testing.T) {
+	if !util.IsWindows() {
+		t.SkipNow()
+	}
+
+	tests := []testCase{
+		// reference: give{template, query, items}, want{output OR match}
+
+		/*
+			You can read each line in the following table as a pipeline that
+			consist of series of parsers that act upon your input (col. 1) and
+			each cell represents the output value.
+
+			For example:
+			- exec.Command("program.exe", `\''`)
+			- goes to win32 api which will process it transparently as it contains no special characters, see [CommandLineToArgvW][].
+			- powershell command will receive it as is, that is two arguments: a literal backslash and empty string in single quotes
+			- native command run via/from powershell will receive only one argument: a literal backslash. Because extra parsing rules apply, see [NativeCallsFromPowershell][].
+			- some¹ apps have internal parser, that requires one more level of escaping (yes, this is completely application-specific, but see terminal_test.go#TestWindowsCommands)
+
+			Character⁰  CommandLineToArgvW  Powershell commands             Native commands from Powershell  Apps requiring escapes¹          | Being tested below
+			----------  ------------------  ------------------------------  -------------------------------  -------------------------------  | ------------------
+			"           empty string²       missing argument error          ...                              ...                              |
+			\"          literal "           unbalanced quote error          ...                              ...                              |
+			'\"'        literal '"'         literal "                       empty string                     empty string (match all)         | yes
+			'\\\"'      literal '\"'        literal \"                      literal "                        literal "                        |
+			----------  ------------------  ------------------------------  -------------------------------  -------------------------------  | ------------------
+			\           transparent         transparent                     transparent                      regex error                      |
+			'\'         transparent         literal \                       literal \                        regex error                      | yes
+			\\          transparent         transparent                     transparent                      literal \                        |
+			'\\'        transparent         literal \\                      literal \\                       literal \                        |
+			----------  ------------------  ------------------------------  -------------------------------  -------------------------------  | ------------------
+			'           transparent         unbalanced quote error          ...                              ...                              |
+			\'          transparent         literal \ and unb. quote error  ...                              ...                              |
+			\''         transparent         literal \ and empty string      literal \                        regex error                      | no, but given as example above
+			'''         transparent         unbalanced quote error          ...                              ...                              |
+			''''        transparent         literal '                       literal '                        literal '                        | yes
+			----------  ------------------  ------------------------------  -------------------------------  -------------------------------  | ------------------
+
+			⁰: charatecter or characters 'x' as an argument to a program in go's call: exec.Command("program.exe", `x`)
+			¹: native commands like grep, git grep, ripgrep
+			²: interpreted as a grouping quote, affects argument parser and gets removed from the result
+
+			[CommandLineToArgvW]: https://docs.microsoft.com/en-gb/windows/win32/api/shellapi/nf-shellapi-commandlinetoargvw#remarks
+			[NativeCallsFromPowershell]: https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_parsing?view=powershell-7.1#passing-arguments-that-contain-quote-characters
+		*/
+
+		// 1) working examples
+
+		{give{`Get-Content {}`, ``, newItems(`C:\test.txt`)}, want{output: `Get-Content 'C:\test.txt'`}},
+		{give{`rg -- "package" {}`, ``, newItems(`.\test.go`)}, want{output: `rg -- "package" '.\test.go'`}},
+
+		// example of escaping single quotes
+		{give{`rg -- {}`, ``, newItems(`'foobar'`)}, want{output: `rg -- '''foobar'''`}},
+
+		// chaining powershells
+		{give{`powershell -NoProfile -Command {}`, ``, newItems(`cat "C:\test.txt"`)}, want{output: `powershell -NoProfile -Command 'cat \"C:\test.txt\"'`}},
+
+		// 2) problematic examples
+		// (not necessarily unexpected)
+
+		// looking for a path string will only work with escaped backslashes
+		{give{`rg -- {}`, ``, newItems(`C:\test.txt`)}, want{output: `rg -- 'C:\test.txt'`}},
+		// looking for a literal double quote will only work with triple escaped double quotes
+		{give{`rg -- {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `rg -- '\"C:\test.txt\"'`}},
+
+		// Get-Content (i.e. cat alias) is parsing `"` as a part of the file path, returns an error:
+		// Get-Content : Cannot find drive. A drive with the name '"C:' does not exist.
+		{give{`cat {}`, ``, newItems(`"C:\test.txt"`)}, want{output: `cat '\"C:\test.txt\"'`}},
+
+		// the "file" flag in the pattern won't create *.ps1 file so the powershell will offload this "unknown" filetype
+		// to explorer, which will prompt user to pick editing program for the fzf-preview file
+		// the temp file contains: `cat "C:\test.txt"`
+		// TODO this should actually work
+		{give{`powershell -NoProfile -Command {f}`, ``, newItems(`cat "C:\test.txt"`)}, want{match: `^powershell -NoProfile -Command .*\fzf-preview-[0-9]{9}$`}},
+	}
+
+	// to force powershell-style escaping we temporarily set environment variable that fzf honors
+	shellBackup := os.Getenv("SHELL")
+	os.Setenv("SHELL", "powershell")
+	testCommands(t, tests)
+	os.Setenv("SHELL", shellBackup)
+}
+
+/*
+Test typical valid placeholders and parsing of them.
+
+Also since the parser assumes the input is matched with `placeholder` regex,
+the regex is tested here as well.
+*/
+func TestParsePlaceholder(t *testing.T) {
+	// give, want pairs
+	templates := map[string]string{
+		// I. item type placeholder
+		`{}`:    `{}`,
+		`{+}`:   `{+}`,
+		`{n}`:   `{n}`,
+		`{+n}`:  `{+n}`,
+		`{f}`:   `{f}`,
+		`{+nf}`: `{+nf}`,
+
+		// II. token type placeholders
+		`{..}`:       `{..}`,
+		`{1..}`:      `{1..}`,
+		`{..2}`:      `{..2}`,
+		`{1..2}`:     `{1..2}`,
+		`{-2..-1}`:   `{-2..-1}`,
+		// shorthand for x..x range
+		`{1}`:    `{1}`,
+		`{1..1}`: `{1..1}`,
+		`{-6}`:   `{-6}`,
+		// multiple ranges
+		`{1,2}`:         `{1,2}`,
+		`{1,2,4}`:       `{1,2,4}`,
+		`{1,2..4}`:      `{1,2..4}`,
+		`{1..2,-4..-3}`: `{1..2,-4..-3}`,
+		// flags
+		`{+1}`:      `{+1}`,
+		`{+-1}`:     `{+-1}`,
+		`{s1}`:      `{s1}`,
+		`{f1}`:      `{f1}`,
+		`{+s1..2}`:  `{+s1..2}`,
+		`{+sf1..2}`: `{+sf1..2}`,
+
+		// III. query type placeholder
+		// query flag is not removed after parsing, so it gets doubled
+		// while the double q is invalid, it is useful here for testing purposes
+		`{q}`:       `{qq}`,
+		`{q:1}`:     `{qq:1}`,
+		`{q:2..}`:   `{qq:2..}`,
+		`{q:..}`:    `{qq:..}`,
+		`{q:2..-1}`: `{qq:2..-1}`,
+		`{q:s2..-1}`: `{sqq:2..-1}`, // FIXME
+
+		// IV. escaping placeholder
+		`\{}`:   `{}`,
+		`\{++}`: `{++}`,
+		`{++}`:  `{+}`,
+	}
+
+	for giveTemplate, wantTemplate := range templates {
+		if !placeholder.MatchString(giveTemplate) {
+			t.Errorf(`given placeholder %s does not match placeholder regex, so attempt to parse it is unexpected`, giveTemplate)
+			continue
+		}
+
+		_, placeholderWithoutFlags, flags := parsePlaceholder(giveTemplate)
+		gotTemplate := placeholderWithoutFlags[:1] + flags.encodePlaceholder() + placeholderWithoutFlags[1:]
+
+		if gotTemplate != wantTemplate {
+			t.Errorf(`parsed placeholder "%s" into "%s", but want "%s"`, giveTemplate, gotTemplate, wantTemplate)
+		}
+	}
+}
+
+func TestExtractPassthroughs(t *testing.T) {
+	for _, middle := range []string{
+		"\x1bPtmux;\x1b\x1bbar\x1b\\",
+		"\x1bPtmux;\x1b\x1bbar\x1bbar\x1b\\",
+		"\x1b]1337;bar\x1b\\",
+		"\x1b]1337;bar\x1bbar\x1b\\",
+		"\x1b]1337;bar\a",
+		"\x1b_Ga=T,f=32,s=1258,v=1295,c=74,r=35,m=1\x1b\\",
+		"\x1b_Ga=T,f=32,s=1258,v=1295,c=74,r=35,m=1\x1b\\\r",
+		"\x1b_Ga=T,f=32,s=1258,v=1295,c=74,r=35,m=1\x1bbar\x1b\\\r",
+		"\x1b_Gm=1;AAAAAAAAA=\x1b\\",
+		"\x1b_Gm=1;AAAAAAAAA=\x1b\\\r",
+		"\x1b_Gm=1;\x1bAAAAAAAAA=\x1b\\\r",
+	} {
+		line := "foo" + middle + "baz"
+		loc := findPassThrough(line)
+		if loc == nil || line[0:loc[0]] != "foo" || line[loc[1]:] != "baz" {
+			t.Error("failed to find passthrough")
+		}
+		garbage := "\x1bPtmux;\x1b]1337;\x1b_Ga=\x1b]1337;bar\x1b."
+		line = strings.Repeat("foo"+middle+middle+"baz", 3) + garbage
+		passthroughs, result := extractPassThroughs(line)
+		if result != "foobazfoobazfoobaz"+garbage || len(passthroughs) != 6 {
+			t.Error("failed to extract passthroughs")
+		}
+	}
+}
+
+/* utilities section */
+
+// Item represents one line in fzf UI. Usually it is relative path to files and folders.
+func newItem(str string) *Item {
+	bytes := []byte(str)
+	trimmed, _, _ := extractColor(str, nil, nil)
+	return &Item{origText: &bytes, text: util.ToChars([]byte(trimmed))}
+}
+
+// Functions tested in this file require array of items (allItems).
+// This is helper function.
+func newItems(str ...string) [3][]*Item {
+	result := make([]*Item, len(str))
+	for i, s := range str {
+		result[i] = newItem(s)
+	}
+	return [3][]*Item{result, nil, nil}
+}
+
+// (for logging purposes)
+func (item *Item) String() string {
+	return item.AsString(true)
+}
+
+// Helper function to parse, execute and convert "text/template" to string. Panics on error.
+func templateToString(format string, data any) string {
+	bb := &bytes.Buffer{}
+
+	err := template.Must(template.New("").Parse(format)).Execute(bb, data)
+	if err != nil {
+		panic(err)
+	}
+
+	return bb.String()
+}
+
+// ad hoc types for test cases
+type give struct {
+	template string
+	query    string
+	allItems [3][]*Item
+}
+type want struct {
+	/*
+		Unix:
+		The `want.output` string is supposed to be formatted for evaluation by
+		`sh -c command` system call.
+
+		Windows:
+		The `want.output` string is supposed to be formatted for evaluation by
+		`cmd.exe /s /c "command"` system call. The `/s` switch enables so called old
+		behaviour, which is more favourable for nesting (possibly escaped)
+		special characters. This is the relevant section of `help cmd`:
+
+		...old behavior is to see if the first character is
+		a quote character and if so, strip the leading character and
+		remove the last quote character on the command line, preserving
+		any text after the last quote character.
+	*/
+	output string // literal output
+	match  string // output is matched against this regex (when output is empty string)
+}
+type testCase struct {
+	give
+	want
+}
+
+func testCommands(t *testing.T, tests []testCase) {
+	// common test parameters
+	delim := "\t"
+	delimiter := Delimiter{str: &delim}
+	printsep := ""
+	stripAnsi := false
+	forcePlus := false
+
+	// evaluate the test cases
+	for idx, test := range tests {
+		gotOutput := replacePlaceholderTest(
+			test.template, stripAnsi, delimiter, printsep, forcePlus,
+			test.query,
+			test.allItems)
+		switch {
+		case test.output != "":
+			if gotOutput != test.output {
+				t.Errorf("tests[%v]:\ngave{\n\ttemplate: '%s',\n\tquery: '%s',\n\tallItems: %s}\nand got '%s',\nbut want '%s'",
+					idx,
+					test.template, test.query, test.allItems,
+					gotOutput, test.output)
+			}
+		case test.match != "":
+			wantMatch := strings.ReplaceAll(test.match, `\`, `\\`)
+			wantRegex := regexp.MustCompile(wantMatch)
+			if !wantRegex.MatchString(gotOutput) {
+				t.Errorf("tests[%v]:\ngave{\n\ttemplate: '%s',\n\tquery: '%s',\n\tallItems: %s}\nand got '%s',\nbut want '%s'",
+					idx,
+					test.template, test.query, test.allItems,
+					gotOutput, test.match)
+			}
+		default:
+			t.Errorf("tests[%v]: test case does not describe 'want' property", idx)
+		}
+	}
+}
+
+// naive encoder of placeholder flags
+func (flags placeholderFlags) encodePlaceholder() string {
+	encoded := ""
+	if flags.plus {
+		encoded += "+"
+	}
+	if flags.preserveSpace {
+		encoded += "s"
+	}
+	if flags.number {
+		encoded += "n"
+	}
+	if flags.file {
+		encoded += "f"
+	}
+	if flags.forceUpdate { // FIXME
+		encoded += "q"
+	}
+	return encoded
+}
+
+// can be replaced with os.ReadFile() in go 1.16+
+func readFile(path string) ([]byte, error) {
+	file, err := os.Open(path)
+	if err != nil {
+		return nil, err
+	}
+	defer file.Close()
+
+	data := make([]byte, 0, 128)
+	for {
+		if len(data) >= cap(data) {
+			d := append(data[:cap(data)], 0)
+			data = d[:len(data)]
+		}
+
+		n, err := file.Read(data[len(data):cap(data)])
+		data = data[:len(data)+n]
+		if err != nil {
+			if err == io.EOF {
+				err = nil
+			}
+			return data, err
+		}
+	}
+}
}

@@ -1,4 +1,4 @@
-// +build !windows
+//go:build !windows

package fzf

@@ -6,8 +6,19 @@ import (
	"os"
	"os/signal"
	"syscall"
+
+	"golang.org/x/sys/unix"
)

func notifyOnResize(resizeChan chan<- os.Signal) {
	signal.Notify(resizeChan, syscall.SIGWINCH)
}
+
+func notifyStop(p *os.Process) {
+	pid := p.Pid
+	pgid, err := unix.Getpgid(pid)
+	if err == nil {
+		pid = pgid * -1
+	}
+	unix.Kill(pid, syscall.SIGTSTP)
+}

@@ -1,4 +1,4 @@
-// +build windows
+//go:build windows

package fzf

@@ -9,3 +9,7 @@ import (
func notifyOnResize(resizeChan chan<- os.Signal) {
	// TODO
}
+
+func notifyStop(p *os.Process) {
+	// NOOP
+}
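One aside on the readFile helper in the test utilities above: as its comment notes, on Go 1.16 and later the manual read loop collapses to a single standard-library call, for example:

// Equivalent on Go 1.16+, assuming only the file contents are needed:
data, err := os.ReadFile(path)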
Some files were not shown because too many files have changed in this diff.