Compare commits
1023 Commits
d3e0b86a91
...
fdeb5f7951
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fdeb5f7951 | ||
|
|
a72395fe83 | ||
|
|
a8871a9575 | ||
|
|
b538e2f8c1 | ||
|
|
82ec29f6a1 | ||
|
|
02d9872b95 | ||
|
|
cb1f330231 | ||
|
|
8693061338 | ||
|
|
80e9457305 | ||
|
|
9b85af439d | ||
|
|
0be54abe60 | ||
|
|
059bde09e4 | ||
|
|
9c354d5ef4 | ||
|
|
84666155c4 | ||
|
|
27d3ecad04 | ||
|
|
022bc70f53 | ||
|
|
d27776ac23 | ||
|
|
fb70b7cc0b | ||
|
|
de1d546b67 | ||
|
|
91fc41f56e | ||
|
|
5d7c0658b9 | ||
|
|
f2951bf78e | ||
|
|
4b67ff5763 | ||
|
|
dca1d6c6a4 | ||
|
|
4dcbb7c942 | ||
|
|
4c7b174dd5 | ||
|
|
b151998144 | ||
|
|
a23cf1bf3b | ||
|
|
f7a73dd5e1 | ||
|
|
0a267cf9ec | ||
|
|
24e66ca75a | ||
|
|
0aaf22d4b6 | ||
|
|
7be0b6735e | ||
|
|
faeba25c53 | ||
|
|
b67018d981 | ||
|
|
f2d340e778 | ||
|
|
08b5f284d3 | ||
|
|
358df8acea | ||
|
|
f1f60f7178 | ||
|
|
a20f651c01 | ||
|
|
07d0603d8e | ||
|
|
1098d58ff9 | ||
|
|
5ee1a3cfca | ||
|
|
6a4936d8d4 | ||
|
|
8848227837 | ||
|
|
5cb2dd5c7e | ||
|
|
9d3f9957ca | ||
|
|
08016d0305 | ||
|
|
4d16762d4c | ||
|
|
94e82e132a | ||
|
|
ec95bc8028 | ||
|
|
0e82369d9a | ||
|
|
7b81f04eea | ||
|
|
4d566aff8b | ||
|
|
5f61f33e37 | ||
|
|
3a3466bf2e | ||
|
|
1faaafbfda | ||
|
|
eb935bae73 | ||
|
|
0bf01e3105 | ||
|
|
508b545861 | ||
|
|
b0e359989a | ||
|
|
71fa20ebb3 | ||
|
|
9bade07938 | ||
|
|
0d68f39c08 | ||
|
|
84bf7ac5f4 | ||
|
|
22cd07284b | ||
|
|
9e11756d4a | ||
|
|
11ec6cf01e | ||
|
|
e9ab630a74 | ||
|
|
b257a74162 | ||
|
|
b8263ee29e | ||
|
|
9f7534f0ae | ||
|
|
6a79f0455c | ||
|
|
3df604e9bd | ||
|
|
2a4a7d46c4 | ||
|
|
0e036e3789 | ||
|
|
9f48d7980b | ||
|
|
3ddecc9ed5 | ||
|
|
f3a4894699 | ||
|
|
7ee8ef6ce6 | ||
|
|
88c7290a7b | ||
|
|
d44f3fbb5b | ||
|
|
d219f557db | ||
|
|
f220ecd6ab | ||
|
|
16c035eb50 | ||
|
|
ce9b823e17 | ||
|
|
6cff3bd165 | ||
|
|
46a245a59a | ||
|
|
7e4b138b39 | ||
|
|
4c47dceef5 | ||
|
|
9ad3c1d9c8 | ||
|
|
099698e388 | ||
|
|
70070de9fa | ||
|
|
557b821e14 | ||
|
|
cb4581c4e3 | ||
|
|
311aba39d4 | ||
|
|
606bb0f76c | ||
|
|
538a67ee14 | ||
|
|
22b1b0657e | ||
|
|
a7bcad40bb | ||
|
|
c199590b3c | ||
|
|
c145974ce3 | ||
|
|
e4fc653397 | ||
|
|
b48aade271 | ||
|
|
653aebe1f0 | ||
|
|
58b5c8187b | ||
|
|
56a0ba2519 | ||
|
|
6ccea1872c | ||
|
|
978dae32d8 | ||
|
|
c965e9f51c | ||
|
|
c5f43b56f2 | ||
|
|
ddafeb3a28 | ||
|
|
a72352eb15 | ||
|
|
93d99a644a | ||
|
|
67a61093d5 | ||
|
|
cb716c161c | ||
|
|
d34472254b | ||
|
|
25b99b938e | ||
|
|
ec4dfed2be | ||
|
|
3f1c9265fe | ||
|
|
edfe0fcd6e | ||
|
|
d894f686a1 | ||
|
|
845ecf1498 | ||
|
|
5521a352a7 | ||
|
|
a528f47a14 | ||
|
|
fd95fbcac1 | ||
|
|
6f1352238a | ||
|
|
d9a36012bc | ||
|
|
c5eb6d140a | ||
|
|
146bd2e569 | ||
|
|
ea817ad629 | ||
|
|
a010b232bf | ||
|
|
d5ff2e639d | ||
|
|
ad376c40f1 | ||
|
|
f3cfb1faa6 | ||
|
|
a0e0126ff3 | ||
|
|
c24728202d | ||
|
|
5b41d2c95e | ||
|
|
298773c507 | ||
|
|
39c3375cf5 | ||
|
|
8f3f6c05db | ||
|
|
530615a6a1 | ||
|
|
9acce9f13d | ||
|
|
18e95f4085 | ||
|
|
832330f31b | ||
|
|
eaf47d7fed | ||
|
|
bb45af93fc | ||
|
|
a8542d7dee | ||
|
|
b88daca131 | ||
|
|
b9059da814 | ||
|
|
0ab5c93845 | ||
|
|
c9c3e2eb7e | ||
|
|
36a584113d | ||
|
|
935f829b42 | ||
|
|
9a6b0ceced | ||
|
|
8cb7559d5d | ||
|
|
2dab5b9a51 | ||
|
|
123e3b9846 | ||
|
|
94fc538a50 | ||
|
|
b41d710a35 | ||
|
|
fc7ca8318b | ||
|
|
ef43c8a8f8 | ||
|
|
33a9049a1f | ||
|
|
8d56120b88 | ||
|
|
26a8712d82 | ||
|
|
b4046e0b18 | ||
|
|
eccdb0e13e | ||
|
|
50ee3ded9f | ||
|
|
bf257b5ff3 | ||
|
|
fb19499383 | ||
|
|
1a0478a545 | ||
|
|
9f328a376f | ||
|
|
d7f4724f26 | ||
|
|
cba4566cdd | ||
|
|
c4a3c54ff3 | ||
|
|
15417eb1d3 | ||
|
|
007f5e2377 | ||
|
|
f9c4b709ad | ||
|
|
e91431f6b6 | ||
|
|
08f37697d8 | ||
|
|
12665032a3 | ||
|
|
762fdfd876 | ||
|
|
d15ba05a44 | ||
|
|
aa8b942f2d | ||
|
|
44d039a7c2 | ||
|
|
8b38c3e345 | ||
|
|
63b5b85476 | ||
|
|
5d301e5dbf | ||
|
|
b036674d46 | ||
|
|
fed3722ef9 | ||
|
|
e1402807c4 | ||
|
|
a76f3bfc56 | ||
|
|
57413725c7 | ||
|
|
15496345d9 | ||
|
|
21f195d8f6 | ||
|
|
96deafbf78 | ||
|
|
383cf1e98f | ||
|
|
c952f62c21 | ||
|
|
63ce81c6ce | ||
|
|
5e866c7cbf | ||
|
|
332a609d7f | ||
|
|
e5f71c7c5d | ||
|
|
bfe56f5266 | ||
|
|
3524a12ffd | ||
|
|
09fa7576d0 | ||
|
|
4c9a8e8604 | ||
|
|
6aa26e7940 | ||
|
|
1de4863726 | ||
|
|
73415f89fc | ||
|
|
a7b46658ac | ||
|
|
99656ea048 | ||
|
|
1f983f2090 | ||
|
|
8f59cccbae | ||
|
|
06d9d95972 | ||
|
|
3098010716 | ||
|
|
eef4996ed3 | ||
|
|
7f10d615b2 | ||
|
|
de6c4d0c07 | ||
|
|
738af256b0 | ||
|
|
3a90c12dc2 | ||
|
|
618c164d2d | ||
|
|
20e406d5c8 | ||
|
|
82575bd3a1 | ||
|
|
cd16ff0352 | ||
|
|
e14c85a5af | ||
|
|
58bb70a2a4 | ||
|
|
6d05859954 | ||
|
|
254ac6359b | ||
|
|
bd2a8af186 | ||
|
|
1480ef504b | ||
|
|
f72a365c76 | ||
|
|
71d7ec0851 | ||
|
|
19188fabb9 | ||
|
|
feb508bf27 | ||
|
|
8c50af2246 | ||
|
|
2abdf5f2d9 | ||
|
|
0d575e8850 | ||
|
|
e34d0e315b | ||
|
|
5afe2f6bc8 | ||
|
|
639f2d511d | ||
|
|
2e62878acb | ||
|
|
426eff309b | ||
|
|
1bd3f95627 | ||
|
|
493d28699c | ||
|
|
9b86baeb3e | ||
|
|
9717cd4b68 | ||
|
|
672404402c | ||
|
|
a93b5e052a | ||
|
|
61d7dcb94e | ||
|
|
d7a0ac96b3 | ||
|
|
8795756232 | ||
|
|
2fcf922dd8 | ||
|
|
22c91202a5 | ||
|
|
3ed54ce6b3 | ||
|
|
c55527ea6a | ||
|
|
9b7751fa50 | ||
|
|
21b8267dcb | ||
|
|
44740518a7 | ||
|
|
95b703b301 | ||
|
|
5232e7d866 | ||
|
|
f527dfc83b | ||
|
|
288ab8a74d | ||
|
|
765a46a8d2 | ||
|
|
1cc1ccf15e | ||
|
|
4a1c3f6c92 | ||
|
|
e7cae3d12f | ||
|
|
2773855cec | ||
|
|
dce89e2f67 | ||
|
|
44624ba3c1 | ||
|
|
9a25bdea37 | ||
|
|
11559c7b26 | ||
|
|
21ba31e807 | ||
|
|
099f3cde69 | ||
|
|
fc413738b7 | ||
|
|
ccabeabe1e | ||
|
|
7311988d4a | ||
|
|
173114a993 | ||
|
|
49bd4a8885 | ||
|
|
8af59d0b3f | ||
|
|
8900f9b93e | ||
|
|
7ae2a3919f | ||
|
|
c07c83fb8f | ||
|
|
fbdcb94ba1 | ||
|
|
4472f3db94 | ||
|
|
6f05851282 | ||
|
|
99217c631b | ||
|
|
bf5ea8dc08 | ||
|
|
692a403a7e | ||
|
|
019591ae23 | ||
|
|
8c40b4de28 | ||
|
|
6b632ff244 | ||
|
|
d32a460e38 | ||
|
|
03916829b3 | ||
|
|
ff97bce04b | ||
|
|
0dc6632e1c | ||
|
|
ebd14db956 | ||
|
|
196fdbbda8 | ||
|
|
b58d0a2df5 | ||
|
|
4f417c29a8 | ||
|
|
9d9f98dc54 | ||
|
|
86dbf0c568 | ||
|
|
296807442e | ||
|
|
e9f591d2c4 | ||
|
|
219c368c05 | ||
|
|
66f9b71258 | ||
|
|
32a79c0a1b | ||
|
|
2b0e958b1f | ||
|
|
0c9e861307 | ||
|
|
eb42b207f0 | ||
|
|
467e91ab47 | ||
|
|
e0536506b9 | ||
|
|
8005f9baf0 | ||
|
|
79b63d2d73 | ||
|
|
47fd796d50 | ||
|
|
e056678799 | ||
|
|
809a1e6e2c | ||
|
|
59e1ecd181 | ||
|
|
713f33ad87 | ||
|
|
aa00bb134b | ||
|
|
7b518eb2e9 | ||
|
|
93ebf3ccd0 | ||
|
|
7132370dd2 | ||
|
|
9a32c68bd8 | ||
|
|
21bd31b97e | ||
|
|
3e4d90afcf | ||
|
|
1d791b9430 | ||
|
|
2e1ef484f3 | ||
|
|
b1f2fac564 | ||
|
|
56adbbf345 | ||
|
|
76ad07e2ee | ||
|
|
dac0148176 | ||
|
|
6143055aa6 | ||
|
|
ec64c79bc2 | ||
|
|
93e4a6154c | ||
|
|
d36db194f8 | ||
|
|
091b5e5e70 | ||
|
|
7c77e14c70 | ||
|
|
359f9f2c60 | ||
|
|
886ab27dc6 | ||
|
|
8cdcf987d2 | ||
|
|
f098dece8d | ||
|
|
8e58367675 | ||
|
|
6b7598e496 | ||
|
|
f947b06725 | ||
|
|
470bb98dbb | ||
|
|
694ad0e63a | ||
|
|
625236f8df | ||
|
|
1a4e9d531a | ||
|
|
3cfcbf0edd | ||
|
|
b7237595df | ||
|
|
bc085ae0fb | ||
|
|
2e40ab7a45 | ||
|
|
01b7046970 | ||
|
|
c3e3ab1e46 | ||
|
|
e664429465 | ||
|
|
123433e39b | ||
|
|
35bbf42e24 | ||
|
|
fe19ba0c5c | ||
|
|
8fcccf68e3 | ||
|
|
4c8466bb21 | ||
|
|
5542f5592a | ||
|
|
35e2368dea | ||
|
|
ee2a9c63ee | ||
|
|
272f75349c | ||
|
|
66696af67e | ||
|
|
4cae92f7cd | ||
|
|
5cfdc7e35a | ||
|
|
3b79ceca0e | ||
|
|
ee555b253f | ||
|
|
a419ae960c | ||
|
|
9ef4ba2abe | ||
|
|
6c29c9c4c5 | ||
|
|
dc2bd8dcdf | ||
|
|
e99d7ad0ea | ||
|
|
c50a7db130 | ||
|
|
1ded347355 | ||
|
|
7d4eda55e0 | ||
|
|
729d7a060b | ||
|
|
2ab0ed1938 | ||
|
|
b72431922b | ||
|
|
2823fa0466 | ||
|
|
d730e8d235 | ||
|
|
78cc39fbd2 | ||
|
|
2e20f74675 | ||
|
|
34642abaf9 | ||
|
|
efabb9c3ca | ||
|
|
79179cc465 | ||
|
|
3d4ed341e7 | ||
|
|
d693f9d5f2 | ||
|
|
fe41f95570 | ||
|
|
983af9b30f | ||
|
|
2523990075 | ||
|
|
0fc34a9fcb | ||
|
|
d5c36155e3 | ||
|
|
c10782c549 | ||
|
|
91bd270d9e | ||
|
|
51db54e0aa | ||
|
|
10c7ab9679 | ||
|
|
fee576f638 | ||
|
|
a1494717b9 | ||
|
|
b8f42ed03f | ||
|
|
9b793fa7ba | ||
|
|
7be728e233 | ||
|
|
06a4b3ec73 | ||
|
|
9ab4341c2e | ||
|
|
761542743c | ||
|
|
888e6457dd | ||
|
|
dd88547c37 | ||
|
|
56a7487abd | ||
|
|
b10781a8c0 | ||
|
|
50806ba81a | ||
|
|
ec6f0de95b | ||
|
|
cb8caed662 | ||
|
|
bfa233330e | ||
|
|
d5490eacb9 | ||
|
|
f7854a76ed | ||
|
|
c7d58636b0 | ||
|
|
52dc8cea32 | ||
|
|
40e4fcc74a | ||
|
|
f6220eb16e | ||
|
|
9e3df98e79 | ||
|
|
21edfbd633 | ||
|
|
3cc0fd3b22 | ||
|
|
940dc2efd7 | ||
|
|
a53591d28e | ||
|
|
6a2a5ad67f | ||
|
|
2a92ecaf59 | ||
|
|
b59875d1df | ||
|
|
f2e75d16fc | ||
|
|
5541220a68 | ||
|
|
bc78075b1a | ||
|
|
fb636d3077 | ||
|
|
15baa8f297 | ||
|
|
e374e2d99b | ||
|
|
db5b3153c0 | ||
|
|
a909fdb2bd | ||
|
|
45a528118c | ||
|
|
2e4cacaaba | ||
|
|
2435c8bebf | ||
|
|
bacfaa4fc3 | ||
|
|
1025f1bc01 | ||
|
|
c056e8e2f2 | ||
|
|
80deefb8b7 | ||
|
|
2fe9d593b1 | ||
|
|
01ba00db4b | ||
|
|
f0a2cf32ac | ||
|
|
bd32c68056 | ||
|
|
ea475386d6 | ||
|
|
da9ebb25da | ||
|
|
a820078214 | ||
|
|
fc54b8ff8f | ||
|
|
e279af07d3 | ||
|
|
7b1820cb63 | ||
|
|
b8d51e2fa2 | ||
|
|
79fdf05d84 | ||
|
|
892a3f24a5 | ||
|
|
b573c0dbe7 | ||
|
|
9d8506461c | ||
|
|
10cd22ee1a | ||
|
|
7bc9dedccd | ||
|
|
71b097c112 | ||
|
|
bc90085f05 | ||
|
|
564d593f04 | ||
|
|
46cf015572 | ||
|
|
091f750bd8 | ||
|
|
8cd1d6ddf2 | ||
|
|
1d61ac1243 | ||
|
|
95c79cacf6 | ||
|
|
0d6d200ab2 | ||
|
|
4f2e76e833 | ||
|
|
96cb67259d | ||
|
|
3e7f491b74 | ||
|
|
695c51a97e | ||
|
|
42a10cc2d5 | ||
|
|
324b96d1e1 | ||
|
|
f056b16c65 | ||
|
|
75222eeb7f | ||
|
|
15e7c2b6e5 | ||
|
|
aa2534c901 | ||
|
|
285d04aa9a | ||
|
|
a708186b4f | ||
|
|
97fb35b5f0 | ||
|
|
cb685d4329 | ||
|
|
d013b60fc1 | ||
|
|
430eafcf80 | ||
|
|
2d1174266d | ||
|
|
d6906e5ed9 | ||
|
|
19848fd379 | ||
|
|
b802c4f6f0 | ||
|
|
982c73dc03 | ||
|
|
3e8d4b223b | ||
|
|
1813e21e75 | ||
|
|
03b3eea957 | ||
|
|
90323ab6b1 | ||
|
|
c381e3aa38 | ||
|
|
7f9b5e3423 | ||
|
|
45d3ee3bfb | ||
|
|
14c5f926a4 | ||
|
|
74ebb87823 | ||
|
|
e0e5f5ff34 | ||
|
|
b23ec5604b | ||
|
|
a402ef413d | ||
|
|
31ed7b1b68 | ||
|
|
5a0c8777b2 | ||
|
|
55ac1758f3 | ||
|
|
8c8ac4f5be | ||
|
|
648c4e0cba | ||
|
|
7d42c5735e | ||
|
|
1159bbe0f5 | ||
|
|
442f20dda3 | ||
|
|
fdcccc844e | ||
|
|
de6cdf6bfc | ||
|
|
c91f135458 | ||
|
|
97c331c2b2 | ||
|
|
57088471a1 | ||
|
|
d527caa7bd | ||
|
|
a3ff74c8e9 | ||
|
|
89d62c7b11 | ||
|
|
091634433b | ||
|
|
90fd8d013d | ||
|
|
8857fc86cd | ||
|
|
697ec44f4d | ||
|
|
6579d47821 | ||
|
|
2f0e7e1c5e | ||
|
|
d442f41477 | ||
|
|
539f99f803 | ||
|
|
0ed7e3aae7 | ||
|
|
7bc6dd89a1 | ||
|
|
5c2b56c06a | ||
|
|
d5566e66c5 | ||
|
|
80f8cf7eb7 | ||
|
|
b7bf9b20de | ||
|
|
5d518711d5 | ||
|
|
e8f2c00416 | ||
|
|
1ea4fc2180 | ||
|
|
9088b76067 | ||
|
|
e10605bb7e | ||
|
|
9a052bddd7 | ||
|
|
6e8ff406c8 | ||
|
|
560acc0235 | ||
|
|
af4f88a0fc | ||
|
|
8f5ec6381f | ||
|
|
34c942e73b | ||
|
|
53df5d9260 | ||
|
|
538d565341 | ||
|
|
cb83cc4b77 | ||
|
|
25acb056d7 | ||
|
|
efc71d6f0e | ||
|
|
df18390a54 | ||
|
|
379c45b556 | ||
|
|
fbffd010be | ||
|
|
23906d4796 | ||
|
|
81ac8fcfbb | ||
|
|
2a94f5f155 | ||
|
|
0b751ca34b | ||
|
|
dd217d6c6a | ||
|
|
a92076bbec | ||
|
|
f2161c32f8 | ||
|
|
6a1ac7284e | ||
|
|
295b565cd6 | ||
|
|
ee856b238d | ||
|
|
8c1fad68a5 | ||
|
|
a583a8449c | ||
|
|
aa6a4f1015 | ||
|
|
ef1cb4b5fb | ||
|
|
73f23ff036 | ||
|
|
29079ccb24 | ||
|
|
0b0a601483 | ||
|
|
f23cc6e94f | ||
|
|
7b945b4f4f | ||
|
|
6becdb4fbd | ||
|
|
adc16bd761 | ||
|
|
549a671cf9 | ||
|
|
43088ad6a0 | ||
|
|
e9b9532160 | ||
|
|
0ad8f3ccfa | ||
|
|
e2bef42a55 | ||
|
|
a620db8cfe | ||
|
|
cf4254750d | ||
|
|
0b57da071a | ||
|
|
9f9e2749a9 | ||
|
|
80c6573e71 | ||
|
|
06e7e1a616 | ||
|
|
6a6afcfe6e | ||
|
|
e3e9db145d | ||
|
|
77089c181b | ||
|
|
221d8b3b27 | ||
|
|
c8afbb3984 | ||
|
|
04fb10d006 | ||
|
|
56465cdf1d | ||
|
|
be60c230b2 | ||
|
|
b73ba17f80 | ||
|
|
4b66b094d5 | ||
|
|
bd879100be | ||
|
|
6574b5a072 | ||
|
|
302fe010bd | ||
|
|
f5ba5dff2d | ||
|
|
0eab6736e1 | ||
|
|
a2cc0fa071 | ||
|
|
03ac98e219 | ||
|
|
1d463323ce | ||
|
|
3fb436dc44 | ||
|
|
1f82be1f02 | ||
|
|
e2db93f955 | ||
|
|
9751e7074c | ||
|
|
e2b434e52c | ||
|
|
c6c81088b8 | ||
|
|
77bd3f09a3 | ||
|
|
bedf000632 | ||
|
|
3fefb1c213 | ||
|
|
9d53141af7 | ||
|
|
293380600e | ||
|
|
c6544ab034 | ||
|
|
f44866a2cc | ||
|
|
a0fcd3f3e7 | ||
|
|
3cf9d38ae2 | ||
|
|
5661b78919 | ||
|
|
13ea7159e3 | ||
|
|
45b57fc547 | ||
|
|
9d60461354 | ||
|
|
ec428f5fc4 | ||
|
|
56f3c924b3 | ||
|
|
4497d9d095 | ||
|
|
13799cd337 | ||
|
|
a0fcb86fce | ||
|
|
ec5fde2771 | ||
|
|
3021932eb6 | ||
|
|
128d98c4e3 | ||
|
|
7b43827926 | ||
|
|
f48d7b33b8 | ||
|
|
4795a2b2cc | ||
|
|
e5eadbfc53 | ||
|
|
902c2f9c17 | ||
|
|
6b06b23686 | ||
|
|
a92c22b58c | ||
|
|
2a233b3d43 | ||
|
|
9e133d9527 | ||
|
|
689f377865 | ||
|
|
0a597d6263 | ||
|
|
224b5e5976 | ||
|
|
4ea1f248a7 | ||
|
|
2cfa06a45b | ||
|
|
e97807f7fa | ||
|
|
9b525445f3 | ||
|
|
43dac36c39 | ||
|
|
58b7cba55a | ||
|
|
801445a07c | ||
|
|
781108f6d3 | ||
|
|
c52ab9ed5f | ||
|
|
33b5cc2e92 | ||
|
|
6719dff149 | ||
|
|
bf48c37dd8 | ||
|
|
bb9901e9bc | ||
|
|
cf76aa8bc2 | ||
|
|
564accddfd | ||
|
|
bbdbdfa5be | ||
|
|
bfeba4151e | ||
|
|
f3132ec569 | ||
|
|
c00b6230d4 | ||
|
|
36a53c890c | ||
|
|
9f140923bc | ||
|
|
3611c93a4a | ||
|
|
1640932148 | ||
|
|
3478ffee2c | ||
|
|
41ff152a12 | ||
|
|
1fdb08b493 | ||
|
|
5830601150 | ||
|
|
04554fe04d | ||
|
|
52f707d1dd | ||
|
|
0932ea9614 | ||
|
|
4abb9794e0 | ||
|
|
1f11351d9b | ||
|
|
7eefb64d15 | ||
|
|
ad1064ec02 | ||
|
|
4acec3d3dd | ||
|
|
b9d6bd52d5 | ||
|
|
d03712874b | ||
|
|
6ee1c64080 | ||
|
|
694575ad36 | ||
|
|
b039bc4b33 | ||
|
|
4dc4d89f81 | ||
|
|
cbb5af6ea5 | ||
|
|
7ef2ac670b | ||
|
|
e364bd072a | ||
|
|
f552c978e0 | ||
|
|
62844b2073 | ||
|
|
81307bfe19 | ||
|
|
a814f60f32 | ||
|
|
10b1522095 | ||
|
|
c3dd017b25 | ||
|
|
06c6971d07 | ||
|
|
fd61b66a97 | ||
|
|
71b29d1def | ||
|
|
ad12b552a0 | ||
|
|
f6454d94f3 | ||
|
|
cd4838a474 | ||
|
|
321e00171e | ||
|
|
8cc0cfc606 | ||
|
|
6c00c2ab56 | ||
|
|
c49a63bb10 | ||
|
|
ff91e7051f | ||
|
|
97c4e26dbf | ||
|
|
90dd6d7718 | ||
|
|
5db2e9c063 | ||
|
|
b3a21d1f7c | ||
|
|
b2108f2369 | ||
|
|
dc8ed09b06 | ||
|
|
0fe61de89e | ||
|
|
456430b36e | ||
|
|
b015a63f04 | ||
|
|
dd08722cfc | ||
|
|
aa66bfca06 | ||
|
|
7d7b2d74fe | ||
|
|
9d99273ff7 | ||
|
|
a7b142945b | ||
|
|
078d90b723 | ||
|
|
378d42c2af | ||
|
|
a203059bb4 | ||
|
|
08ac832da4 | ||
|
|
ff85bb611b | ||
|
|
1880d1059e | ||
|
|
ee0545b739 | ||
|
|
53c63360cb | ||
|
|
a1bcba5cb1 | ||
|
|
afe23aaa40 | ||
|
|
da59401ca7 | ||
|
|
f7cbf776ae | ||
|
|
7700026d87 | ||
|
|
7b2fb257eb | ||
|
|
a0c8363852 | ||
|
|
89fa9bee6f | ||
|
|
7a5bcf2722 | ||
|
|
7ec0603e00 | ||
|
|
437a05e5d6 | ||
|
|
e66c7127ac | ||
|
|
a5992fdb38 | ||
|
|
19e1ae587f | ||
|
|
0f79d99c8b | ||
|
|
accb5b79f8 | ||
|
|
c7640e3fd9 | ||
|
|
0f156770f6 | ||
|
|
fb40809078 | ||
|
|
4d87a9822b | ||
|
|
925c978bbc | ||
|
|
f2eabd65b0 | ||
|
|
449e47f721 | ||
|
|
670d078eae | ||
|
|
a3494ee831 | ||
|
|
107a1f3eb4 | ||
|
|
73bfc064ea | ||
|
|
eb7da379ef | ||
|
|
d4359be92f | ||
|
|
971c24af2d | ||
|
|
4371366db4 | ||
|
|
15f522a218 | ||
|
|
98c67e0e82 | ||
|
|
c0da7ae086 | ||
|
|
81397c1319 | ||
|
|
b4d40f01e8 | ||
|
|
c2d724e6cc | ||
|
|
75b6c56dcc | ||
|
|
36159d2fb9 | ||
|
|
cd6033ad3b | ||
|
|
c7a0e285e0 | ||
|
|
a1a66c1920 | ||
|
|
63eb1b10b3 | ||
|
|
fddc7670aa | ||
|
|
afb3d789ba | ||
|
|
5ca66d7469 | ||
|
|
2b79c6380f | ||
|
|
38cbf06579 | ||
|
|
58ecbd63cf | ||
|
|
1659bf20d4 | ||
|
|
9aa76857b0 | ||
|
|
a46bf3364a | ||
|
|
ff5d233509 | ||
|
|
cec6081218 | ||
|
|
d19ef19a5b | ||
|
|
4ba8ce74cb | ||
|
|
40637c8881 | ||
|
|
bc2ebaa314 | ||
|
|
3c6df7334a | ||
|
|
c1daf510f8 | ||
|
|
d96804bdfb | ||
|
|
2a16edcbe7 | ||
|
|
7f28129c00 | ||
|
|
ecd3e316c5 | ||
|
|
592c277735 | ||
|
|
15774d377d | ||
|
|
37b0d0e3b0 | ||
|
|
05b2c1b9bd | ||
|
|
18a3f1b36e | ||
|
|
9e62356a30 | ||
|
|
e86fbda144 | ||
|
|
1b6ff44312 | ||
|
|
1e4e039672 | ||
|
|
ffd5fb12cc | ||
|
|
8de8368df7 | ||
|
|
c89fe9bef3 | ||
|
|
e9aaa5d5bf | ||
|
|
cd63dda271 | ||
|
|
22206de5ab | ||
|
|
f11070dc60 | ||
|
|
2424c543d6 | ||
|
|
3dac2d3073 | ||
|
|
2b6f068784 | ||
|
|
c38ae0d4a9 | ||
|
|
133538881f | ||
|
|
51453db3c3 | ||
|
|
7615d6af88 | ||
|
|
ee01f7823f | ||
|
|
151ec151db | ||
|
|
28959a357c | ||
|
|
ec3ef25f38 | ||
|
|
31a653449c | ||
|
|
6b2cb49881 | ||
|
|
4f05dbd61f | ||
|
|
c23edf0fb8 | ||
|
|
9dea6dec4e | ||
|
|
e8c9eda1fa | ||
|
|
cfee32ff35 | ||
|
|
389bb59531 | ||
|
|
9fa0dedb42 | ||
|
|
af2fe02806 | ||
|
|
ead7bfcb33 | ||
|
|
b4c9fd47c2 | ||
|
|
a7977139a7 | ||
|
|
4f61306d79 | ||
|
|
db808bb794 | ||
|
|
65dd800526 | ||
|
|
21e8eb1d09 | ||
|
|
d9dc72e3e1 | ||
|
|
07c579af94 | ||
|
|
09ec19fcab | ||
|
|
61ece03aa3 | ||
|
|
5897f89a76 | ||
|
|
15e2103f66 | ||
|
|
6cb050188e | ||
|
|
1eca639c19 | ||
|
|
9fc645c54a | ||
|
|
8a2e992ca5 | ||
|
|
c470e63bac | ||
|
|
1c6064fdb7 | ||
|
|
014dd380cd | ||
|
|
cb4a0c877d | ||
|
|
a00952a377 | ||
|
|
9f5f999989 | ||
|
|
1145f637c7 | ||
|
|
172c7ec91d | ||
|
|
79d704c1cd | ||
|
|
dfd855f380 | ||
|
|
5e111e002a | ||
|
|
3de5947d42 | ||
|
|
c3f4e03f33 | ||
|
|
ad1a7e44bc | ||
|
|
969d30924b | ||
|
|
8b8868760c | ||
|
|
12e0fa9aea | ||
|
|
3999baf8eb | ||
|
|
270b3c711e | ||
|
|
0feb8f574a | ||
|
|
5c66ac3d8a | ||
|
|
3f13e490b3 | ||
|
|
c4b2a5d7e7 | ||
|
|
28b7199424 | ||
|
|
b90456c6f4 | ||
|
|
e1091b72f8 | ||
|
|
2764f16c20 | ||
|
|
fd5681b630 | ||
|
|
0e88a177b3 | ||
|
|
7ad10f92e8 | ||
|
|
e880da210a | ||
|
|
421fd76fe6 | ||
|
|
29ce003fa8 | ||
|
|
b9d79d2fe1 | ||
|
|
898622ff6c | ||
|
|
0a5bf38359 | ||
|
|
005937236b | ||
|
|
b6389419c0 | ||
|
|
56f4d8aad5 | ||
|
|
067cba703b | ||
|
|
5689c3e5ef | ||
|
|
20c2716915 | ||
|
|
d7c2abb43d | ||
|
|
31cb235023 | ||
|
|
d82a1c0414 | ||
|
|
26efd30436 | ||
|
|
5e5a59d960 | ||
|
|
2f78e45444 | ||
|
|
60ec6da886 | ||
|
|
23681240d8 | ||
|
|
b9438f2791 | ||
|
|
5d44be21fa | ||
|
|
d5c956e626 | ||
|
|
d0e62ad980 | ||
|
|
706e5c29ea | ||
|
|
d194ae1edf | ||
|
|
ff1d4f164a | ||
|
|
67af475339 | ||
|
|
acbeb297b2 | ||
|
|
efa51b1a6b | ||
|
|
e962c4857a | ||
|
|
2dfaed38b8 | ||
|
|
6af1ac30df | ||
|
|
a0e79bc90a | ||
|
|
2713f2b127 | ||
|
|
24d7c4742c | ||
|
|
0abb76e785 | ||
|
|
4e23dd36e1 | ||
|
|
8284545ca7 | ||
|
|
fde369c5d9 | ||
|
|
9142cdde2d | ||
|
|
9e10b1b497 | ||
|
|
027fa10f04 | ||
|
|
0eafdd0985 | ||
|
|
87a1628bbe | ||
|
|
1a9a21e321 | ||
|
|
81005ec8f4 | ||
|
|
d14857fa47 | ||
|
|
92e589699c | ||
|
|
87e76f6314 | ||
|
|
243e77fba4 | ||
|
|
07ea7ec5a4 | ||
|
|
b07c9df812 | ||
|
|
8bd918129c | ||
|
|
20a227fc9e | ||
|
|
50150ce23d | ||
|
|
1170c5ee47 | ||
|
|
b380930493 | ||
|
|
d8e864c353 | ||
|
|
8501154084 | ||
|
|
c7a843d9ed | ||
|
|
969f05c7d2 | ||
|
|
23af220f9c | ||
|
|
733bd780d5 | ||
|
|
47e901afa0 | ||
|
|
5e615315f5 | ||
|
|
6dd1cbccc7 | ||
|
|
733ef6332a | ||
|
|
800fad89ea | ||
|
|
6bfb5ed963 | ||
|
|
99949cf1ee | ||
|
|
6c9074f1be | ||
|
|
e66af572d3 | ||
|
|
baf92a8902 | ||
|
|
9f6ab94992 | ||
|
|
d5ee57863a | ||
|
|
93a4d5b2bd | ||
|
|
138ebafbb5 | ||
|
|
371929c127 | ||
|
|
ecbc9bd14a | ||
|
|
e98ceb7f7e | ||
|
|
96901ef29b | ||
|
|
324e109c57 | ||
|
|
94888c078a | ||
|
|
288b53404c | ||
|
|
3767d9f607 | ||
|
|
c47bacf6c5 | ||
|
|
036707957c | ||
|
|
8207c29d9c | ||
|
|
4d3a9fd3ac | ||
|
|
3395c54843 | ||
|
|
900aa73959 | ||
|
|
47f4c1b865 | ||
|
|
3a999e60f8 | ||
|
|
27a590a566 | ||
|
|
01b27fb61d | ||
|
|
454bf7ba4a | ||
|
|
90d4f3f10b | ||
|
|
96bfc09c51 | ||
|
|
d4c3f5a090 | ||
|
|
6ec8e7a669 | ||
|
|
6e4b690bd8 | ||
|
|
465e4c40ab | ||
|
|
db8be3e480 | ||
|
|
d0eecc94f2 | ||
|
|
225f6b24b2 | ||
|
|
1922a08742 | ||
|
|
900f8cbc90 | ||
|
|
9ef94ea291 | ||
|
|
3e865660e4 | ||
|
|
dceadd585a | ||
|
|
16644bb9a6 | ||
|
|
3675a64a16 | ||
|
|
bd1d6d00c3 | ||
|
|
5a8420f7a1 | ||
|
|
9b80839948 | ||
|
|
efb354ba38 | ||
|
|
0846f2f0d9 | ||
|
|
29f4260b2a | ||
|
|
d4655203ba | ||
|
|
6ff67312aa | ||
|
|
b6c19297a0 | ||
|
|
af9edbbc3b | ||
|
|
71fac1e507 | ||
|
|
0994ff0e48 | ||
|
|
eb5e979210 | ||
|
|
13f7e7e095 | ||
|
|
46fff9464f | ||
|
|
4f84fdca34 | ||
|
|
8037105d86 | ||
|
|
5ea70fabbc | ||
|
|
11b5db815e | ||
|
|
e311f2b58f | ||
|
|
79cbffe9d4 | ||
|
|
20340be9ac | ||
|
|
25b70492bc | ||
|
|
68c5514a44 | ||
|
|
6869b08a77 | ||
|
|
4c82c55f58 | ||
|
|
afdecb6c51 | ||
|
|
592e0586b4 | ||
|
|
5150e2f478 | ||
|
|
f2c951ac73 | ||
|
|
e67d7ba98f | ||
|
|
703ae3b776 | ||
|
|
077adc0cb0 | ||
|
|
78c546e7e1 | ||
|
|
fbdf2d84b7 | ||
|
|
04a8505e86 | ||
|
|
4d7bed7057 | ||
|
|
26bb2787d3 | ||
|
|
c1fb3cb3ba |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,2 +1,3 @@
|
||||
/target
|
||||
.aider*
|
||||
.crush
|
||||
|
||||
84
AGENT.md
Normal file
84
AGENT.md
Normal file
@@ -0,0 +1,84 @@
|
||||
# Agent Configuration
|
||||
|
||||
**IMPORTANT:** Prefer to use the `write_file` tool if the edit is for the majority of a file, or if you are correcting previous problems made edits from other tools.
|
||||
|
||||
## Tools
|
||||
|
||||
**IMPORTANT**: Be very careful when quoting text in tool calls to add the right amount of escaping.
|
||||
|
||||
### `write_file`
|
||||
|
||||
When editing files use the `write_file` tool to output the complete version of the corrected file.
|
||||
**IMPORTANT**: You must provide the whole file to `write_file`, even the unchanged parts.
|
||||
|
||||
## Build/Test Commands
|
||||
|
||||
**IMPORTANT**: Do not run application, start the web server, or the trunk server.
|
||||
**IMPORTANT:** The cargo command cannot be ran in parallel.
|
||||
|
||||
```bash
|
||||
# Check project
|
||||
TERM=dumb cargo check
|
||||
|
||||
# Build project
|
||||
TERM=dumb cargo build
|
||||
|
||||
# DO NOT RUN RUN APPLICATION (native)
|
||||
# TERM=dumb cargo run
|
||||
|
||||
# Run all tests
|
||||
TERM=dumb cargo test
|
||||
|
||||
# Run specific test (by name substring)
|
||||
TERM=dumb cargo test test_function_name
|
||||
|
||||
# Run specific test with verbose output
|
||||
TERM=dumb cargo test test_function_name -- --nocapture
|
||||
|
||||
# Check formatting
|
||||
TERM=dumb cargo fmt --check
|
||||
|
||||
# Apply formatting
|
||||
TERM=dumb cargo fmt
|
||||
|
||||
# Lint with clippy
|
||||
TERM=dumb cargo clippy -- -D warnings
|
||||
|
||||
# Build for release
|
||||
TERM=dumb cargo build --release
|
||||
```
|
||||
|
||||
Prefix commands with `TERM=dumb` for consistent output.
|
||||
|
||||
## Code Style Guidelines
|
||||
|
||||
### Imports
|
||||
- Group imports in order: standard library, external crates, local modules
|
||||
- Use explicit imports over glob imports (`use std::fs::File;` not `use std::fs::*;`)
|
||||
|
||||
### Documentation
|
||||
- Document all public APIs with rustdoc
|
||||
- Use examples in documentation only when helpful
|
||||
|
||||
## Procedures
|
||||
|
||||
### Fix build problems
|
||||
|
||||
1. Check the project: `TERM=dumb cargo check`.
|
||||
2. If there are errors or warnings, create a new sub agent (expert rust developer) that uses the `TERM=dumb cargo check` output as input, planned using strategic thinking.
|
||||
a. Read all affected files
|
||||
d. Plan the fixes using strategic thinking:
|
||||
- Read other files if they provide context or examples
|
||||
- Look up relevant API information
|
||||
- Do not downgrade versions
|
||||
- Preserve functionality
|
||||
- Use `TERM=dumb cargo fix` if appropriate.
|
||||
- Prefer the `write_file` tool if there is evidence of double escaping
|
||||
- You must generate the full file contents when using `write_file` or it will be truncated.
|
||||
c. Return the list of files modified
|
||||
3. If any files were modified, loop back to 1.
|
||||
|
||||
### Fix formatting
|
||||
|
||||
1. Format the project the project: `TERM=dumb cargo fmt`
|
||||
2. Continue with the fix build problems procedure.
|
||||
2314
Cargo.lock
generated
2314
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
108
Cargo.toml
108
Cargo.toml
@@ -6,49 +6,99 @@ description = "Keep and manage temporary files with automatic compression and me
|
||||
readme = "README.md"
|
||||
categories = ["command-line-utilities"]
|
||||
|
||||
[[test]]
|
||||
name = "tests"
|
||||
path = "src/tests.rs"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.72"
|
||||
axum = { version = "0.8.4", optional = true }
|
||||
derive_more = { version = "2.0", features = ["full"] }
|
||||
smart-default = "0.7"
|
||||
thiserror = "1.0"
|
||||
base64 = "0.22.1"
|
||||
chrono = { version = "0.4.26", features = ["serde"] }
|
||||
clap = { version = "4.3.10", features = ["derive", "env"] }
|
||||
directories = "5.0.1"
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2.147"
|
||||
log = "0.4.19"
|
||||
rusqlite = { version = "0.29.0", features = ["bundled", "array", "chrono"] }
|
||||
rusqlite_migration = "1.0.2"
|
||||
stderrlog = "0.5.4"
|
||||
strum_macros = "0.25"
|
||||
strum = { version = "0.25", features = ["derive"] }
|
||||
prettytable-rs = "0.10.0"
|
||||
chrono = "0.4.26"
|
||||
gethostname = "0.4.3"
|
||||
humansize = "2.1.3"
|
||||
config = "0.14.0"
|
||||
ctor = "0.2"
|
||||
directories = "6.0.0"
|
||||
dns-lookup = "2.0.2"
|
||||
enum-map = "2.6.1"
|
||||
flate2 = { version = "1.0.27", features = ["zlib-ng-compat"], optional = true }
|
||||
futures = "0.3"
|
||||
gethostname = "1.0.2"
|
||||
humansize = "2.1.3"
|
||||
hyper = { version = "1.0", features = ["full"] }
|
||||
inventory = "0.3"
|
||||
is-terminal = "0.4.9"
|
||||
term = "0.7.0"
|
||||
lz4_flex = "0.11.1"
|
||||
flate2 = { version = "1.0.27", features = ["zlib-ng-compat"] }
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2.147"
|
||||
local-ip-address = "0.6.5"
|
||||
log = "0.4.19"
|
||||
lz4_flex = { version = "0.11.1", optional = true }
|
||||
magic = { version = "0.13.0", optional = true }
|
||||
nix = "0.30.1"
|
||||
once_cell = "1.19.0"
|
||||
comfy-table = "7.2.0"
|
||||
pwhash = "1.0.0"
|
||||
regex = "1.9.5"
|
||||
nix = "0.26.2"
|
||||
sha2 = "0.10.0"
|
||||
local-ip-address = "0.5.5"
|
||||
dns-lookup = "2.0.2"
|
||||
uzers = "0.11.3"
|
||||
ringbuf = "0.3"
|
||||
rmcp = { version = "0.2.0", features = ["server"], optional = true }
|
||||
rusqlite = { version = "0.37.0", features = ["bundled", "array", "chrono"] }
|
||||
rusqlite_migration = "2.3.0"
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde_json = "1.0.142"
|
||||
serde_yaml = "0.9.34"
|
||||
sha2 = "0.10.0"
|
||||
md5 = "0.7.0"
|
||||
stderrlog = "0.6.0"
|
||||
strum = { version = "0.27.2", features = ["derive"] }
|
||||
term = "1.1.0"
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
axum = "0.7"
|
||||
tower = "0.4"
|
||||
tower-http = { version = "0.5", features = ["cors", "fs", "trace"] }
|
||||
hyper = { version = "1.0", features = ["full"] }
|
||||
tokio-stream = "0.1"
|
||||
tokio-util = "0.7.16"
|
||||
tower = { version = "0.5.2", optional = true }
|
||||
tower-http = { version = "0.6.6", features = ["cors", "fs", "trace"], optional = true }
|
||||
utoipa = { version = "5.4.0", features = ["axum_extras"], optional = true }
|
||||
utoipa-swagger-ui = { version = "9.0.2", features = ["axum"], optional = true }
|
||||
uzers = "0.12.1"
|
||||
which = "8.0.0"
|
||||
xdg = "2.5.2"
|
||||
strip-ansi-escapes = "0.2.1"
|
||||
pest = "2.8.1"
|
||||
pest_derive = "2.8.1"
|
||||
dirs = "6.0.0"
|
||||
|
||||
[features]
|
||||
# Default features include core compression engines and swagger UI
|
||||
default = ["magic", "lz4", "gzip"]
|
||||
|
||||
# Full
|
||||
#default = ["server", "magic", "lz4", "swagger"]
|
||||
|
||||
|
||||
# Server feature (includes axum and related dependencies)
|
||||
server = ["dep:axum", "dep:tower", "dep:tower-http", "dep:utoipa"]
|
||||
|
||||
# Compression features
|
||||
gzip = ["flate2"]
|
||||
lz4 = ["lz4_flex"]
|
||||
bzip2 = []
|
||||
xz = []
|
||||
zstd = []
|
||||
|
||||
# Plugin features (meta and filter)
|
||||
all-meta-plugins = ["dep:magic"]
|
||||
all-filter-plugins = []
|
||||
|
||||
# Individual plugin features
|
||||
magic = ["dep:magic"]
|
||||
|
||||
# MCP feature (Model Context Protocol support)
|
||||
mcp = ["dep:rmcp"]
|
||||
|
||||
# Swagger UI feature
|
||||
swagger = ["dep:utoipa-swagger-ui"]
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.3.0"
|
||||
|
||||
rand = "0.8.5"
|
||||
|
||||
128
DESIGN.md
128
DESIGN.md
@@ -47,17 +47,131 @@
|
||||
- `compression_engine/none.rs` - No compression implementation
|
||||
- `compression_engine/program.rs` - External program wrapper
|
||||
|
||||
### Digest Functionality
|
||||
- Digest functionality is now integrated into meta plugins
|
||||
- SHA-256 and other digest algorithms are implemented as meta plugins
|
||||
- External digest programs are supported through meta plugin program wrapper
|
||||
|
||||
### Meta Plugin Module
|
||||
- `meta_plugin.rs` - Trait and type definitions
|
||||
- `meta_plugin/program.rs` - External program wrapper
|
||||
- `meta_plugin/digest.rs` - Internal digest implementations
|
||||
- `meta_plugin/system.rs` - System information metadata plugins
|
||||
|
||||
### Plugins Module
|
||||
### Common Modules
|
||||
- `common/is_binary.rs` - Binary file detection utilities
|
||||
- `common/status.rs` - Status information generation
|
||||
|
||||
### Utility Modules
|
||||
- `plugins.rs` - Shared plugin utilities
|
||||
- Contains `ProgramWriter` for external process communication
|
||||
- `args.rs` - CLI argument definitions
|
||||
|
||||
## Command Line Interface
|
||||
|
||||
### Modes
|
||||
- Save mode: `keep [--save]` (default when no mode specified and no IDs provided)
|
||||
- Get mode: `keep [--get] <ID|tag...>` (default when IDs provided)
|
||||
- List mode: `keep [--list] [tag...]`
|
||||
- Info mode: `keep [--info] <ID|tag...>`
|
||||
- Delete mode: `keep [--delete] <ID...>`
|
||||
- Update mode: `keep [--update] <ID> [tag...]`
|
||||
- Diff mode: `keep [--diff] <ID1> <ID2>`
|
||||
- Status mode: `keep [--status]`
|
||||
- Server mode: `keep [--server] <address:port>`
|
||||
|
||||
### Item Options
|
||||
- `--meta KEY[=VALUE]` - Set metadata for the item, remove if VALUE not provided
|
||||
- `--digest <sha256|md5>` - Digest algorithm to use when saving items
|
||||
- `--compression <lz4|gzip|bzip2|xz|zstd|none>` - Compression algorithm to use when saving items
|
||||
- `--meta-plugins <plugin[,plugin...]>` - Meta plugins to use when saving items
|
||||
|
||||
### General Options
|
||||
- `--dir <PATH>` - Specify the directory to use for storage
|
||||
- `--list-format <FORMAT>` - A comma separated list of columns to display with --list
|
||||
- `--human-readable` - Display file sizes with units
|
||||
- `--verbose` - Increase message verbosity
|
||||
- `--quiet` - Do not show any messages
|
||||
- `--output-format <table|json|yaml>` - Output format for info, status, and list modes
|
||||
- `--server-password <PASSWORD>` - Password for server authentication
|
||||
- `--force` - Force output even when binary data would be sent to a TTY
|
||||
|
||||
## Data Storage
|
||||
|
||||
### Database Schema
|
||||
- `items` table: id (primary key), ts (timestamp), size (optional), compression
|
||||
- `tags` table: id (foreign key to items), name (tag name)
|
||||
- `metas` table: id (foreign key to items), name (meta key), value (meta value)
|
||||
- Indexes on tag names and meta names for faster queries
|
||||
|
||||
### File Storage
|
||||
- Data directory contains compressed item files named by their item ID
|
||||
- Database file stored in data directory
|
||||
- File permissions set to be private to user (umask 077)
|
||||
|
||||
## REST API Endpoints
|
||||
|
||||
### Status Operations
|
||||
- `GET /api/status` - Get system status information
|
||||
|
||||
### Item Operations
|
||||
- `GET /api/item/` - Get a list of items as JSON. Optional params: `order=newest|oldest`, `start=0`, `count=100`, `tags[]=tag1&tags[]=tag2`
|
||||
- `POST /api/item/` - Add a new item
|
||||
- `DELETE /api/item/<#>` - Delete an item
|
||||
- `GET /api/item/latest` - Return the latest item as JSON. Optional params: `tags[]=tag1&tags[]=tag2`, `allow_binary=true|false`
|
||||
- `GET /api/item/latest/meta` - Return the latest item metadata as JSON. Optional params: `tags[]=tag1&tags[]=tag2`
|
||||
- `GET /api/item/latest/content` - Return the raw content of the latest item. Optional params: `tags[]=tag1&tags[]=tag2`
|
||||
- `GET /api/item/<#>` - Return the item as JSON. Optional params: `allow_binary=true|false`
|
||||
- `GET /api/item/<#>/meta` - Return the item metadata as JSON
|
||||
- `GET /api/item/<#>/content` - Return the raw content of the item
|
||||
|
||||
### Authentication
|
||||
- Bearer token authentication: `Authorization: Bearer <password>`
|
||||
- Basic authentication: `Authorization: Basic base64(keep:<password>)`
|
||||
- When no password is set, authentication is disabled
|
||||
|
||||
## Supported Compression Types
|
||||
- LZ4 (internal implementation)
|
||||
- GZip (internal implementation)
|
||||
- BZip2 (external program)
|
||||
- XZ (external program)
|
||||
- ZStd (external program)
|
||||
- None (no compression)
|
||||
|
||||
## Supported Meta Plugins
|
||||
- FileMagic - File type detection using file command
|
||||
- FileMime - MIME type detection using file command
|
||||
- FileEncoding - File encoding detection using file command
|
||||
- LineCount - Line count using wc command
|
||||
- WordCount - Word count using wc command
|
||||
- Cwd - Current working directory
|
||||
- Binary - Binary file detection
|
||||
- Uid - Current user ID
|
||||
- User - Current username
|
||||
- Gid - Current group ID
|
||||
- Group - Current group name
|
||||
- Shell - Shell path from SHELL environment variable
|
||||
- ShellPid - Shell process ID from PPID environment variable
|
||||
- KeepPid - Keep process ID
|
||||
- DigestSha256 - SHA-256 digest
|
||||
- DigestMd5 - MD5 digest using md5sum command
|
||||
- ReadTime - Time taken to read data
|
||||
- ReadRate - Rate of data reading
|
||||
- Hostname - System hostname
|
||||
- FullHostname - Fully qualified domain name
|
||||
|
||||
## Testing Strategy
|
||||
- Unit tests for each module in `src/tests/`
|
||||
- Integration tests for modes
|
||||
- Database tests for CRUD operations
|
||||
- Compression engine tests for each supported format
|
||||
- Meta plugin tests for each plugin type
|
||||
- Server tests for API endpoints and authentication
|
||||
- Common utilities tests for helper functions
|
||||
|
||||
## Binary Data Handling
|
||||
- Automatic binary detection using file signatures and heuristics
|
||||
- Prevents binary data output to TTY unless --force is used
|
||||
- Binary meta plugin analyzes content to determine if it's binary
|
||||
- API endpoints respect binary flags to prevent accidental binary transmission
|
||||
|
||||
## Security Considerations
|
||||
- File permissions are restricted to user only (umask 077)
|
||||
- Input validation for item IDs to prevent path traversal
|
||||
- Authentication for server mode with bearer or basic auth
|
||||
- Proper resource cleanup using RAII patterns
|
||||
- Safe handling of external processes with proper stdin/stdout management
|
||||
|
||||
81
PLAN.md
81
PLAN.md
@@ -1,81 +0,0 @@
|
||||
# Code Quality Issues and Fixes
|
||||
|
||||
## Critical Issues
|
||||
|
||||
### 1. Memory Safety & Resource Leaks - DONE
|
||||
**Files affected:** `src/modes/diff.rs`, `src/compression_engine/program.rs`
|
||||
**Functions affected:** `mode_diff()`, `CompressionEngineProgram::open()`, `CompressionEngineProgram::create()`
|
||||
**Problem example:** Raw file descriptors converted with `unsafe { std::fs::File::from_raw_fd(fd_write) }` without proper cleanup on errors
|
||||
**Fix example:** Use RAII wrappers or ensure proper cleanup in Drop implementations and error paths
|
||||
|
||||
### 2. Error Handling Problems - DONE
|
||||
**Files affected:** `src/modes/save.rs`, `src/modes/update.rs`, `src/db.rs`
|
||||
**Functions affected:** `mode_save()`, `mode_update()`, `get_item()`, `insert_item()`
|
||||
**Problem example:** `item.id.unwrap()` can panic if item.id is None
|
||||
**Fix example:** Replace with `item.id.ok_or_else(|| anyhow!("Item missing ID"))?`
|
||||
|
||||
### 3. Concurrency Issues - DONE
|
||||
**Files affected:** `src/modes/diff.rs`, `src/meta_plugin/digest.rs`
|
||||
**Functions affected:** `mode_diff()`, meta plugin `update()` methods
|
||||
**Problem example:** In `mode_diff()`, if writer threads panic, resources may not be cleaned up properly: `writer_thread_a.join()` only propagates panic but doesn't ensure file descriptors are closed
|
||||
**Fix example:** Use RAII guards or ensure cleanup in panic handlers: `let _fd_guard = FileDescriptorGuard::new(fd_write);`
|
||||
|
||||
## Design Problems
|
||||
|
||||
### 4. Database Design Issues - DONE
|
||||
**Files affected:** `src/db.rs`, `src/modes/save.rs`, `src/modes/update.rs`
|
||||
**Functions affected:** `insert_item()`, `update_item()`, `store_meta()`, `set_item_tags()`
|
||||
**Problem example:** Multiple database operations without transactions can leave partial state
|
||||
**Fix example:** Wrap related operations in `conn.transaction()` blocks
|
||||
|
||||
### 5. Plugin Architecture Flaws
|
||||
**Files affected:** `src/meta_plugin.rs`, `src/meta_plugin/digest.rs`, `src/meta_plugin/program.rs`
|
||||
**Functions affected:** `MetaPlugin::create()`, `MetaPlugin::update()`, `MetaPlugin::finalize()`
|
||||
**Problem example:**
|
||||
- `create()` returns dummy writer that's never used, inconsistent with actual usage pattern
|
||||
- `MetaPluginProgram::finalize()` spawns new process instead of reusing existing one
|
||||
- No validation that meta plugins produce valid output formats
|
||||
- Plugin errors are silently ignored in save operations
|
||||
**Fix example:**
|
||||
- Remove `create()` method and rely only on `update()`/`finalize()` pattern
|
||||
- Reuse single process per plugin instance for better performance
|
||||
- Add output validation and proper error propagation
|
||||
|
||||
### 6. Security Concerns - DONE
|
||||
**Files affected:** `src/main.rs`, `src/modes/get.rs`, `src/modes/delete.rs`
|
||||
**Functions affected:** `main()`, `mode_get()`, `mode_delete()`
|
||||
**Problem example:** Item IDs used directly in file paths without validation: `item_path.push(item_id.to_string())`
|
||||
**Fix example:** Validate item IDs are positive integers and sanitize file paths
|
||||
|
||||
## Performance Issues
|
||||
|
||||
### 7. Inefficient Operations
|
||||
**Files affected:** `src/modes/save.rs`, `src/compression_engine.rs`
|
||||
**Functions affected:** `mode_save()`, `CompressionEngine::size()`
|
||||
**Problem example:** Fixed BUFSIZ buffer (typically 8KB) may not be optimal for all scenarios, especially large files or fast storage
|
||||
**Fix example:** Use adaptive buffer sizing based on file size or storage characteristics, or allow configuration via environment variable
|
||||
|
||||
### 8. I/O Problems
|
||||
**Files affected:** `src/meta_plugin/program.rs`, `src/compression_engine/program.rs`
|
||||
**Functions affected:** `MetaPluginProgram::finalize()`, `CompressionEngineProgram::open()`, `CompressionEngineProgram::create()`
|
||||
**Problem example:** Meta plugin processes can block indefinitely if they hang or produce large output without proper timeouts
|
||||
**Fix example:** Add timeouts to process operations and non-blocking I/O for meta plugins: `process.wait_timeout(Duration::from_secs(30))`
|
||||
|
||||
## Code Quality Issues
|
||||
|
||||
### 9. Error Messages
|
||||
**Files affected:** `src/modes/common.rs`, `src/main.rs`
|
||||
**Functions affected:** `cmd_args_digest_type()`, `cmd_args_compression_type()`, `main()`
|
||||
**Problem example:** `format!("Unknown digest type: {}", digest_name)` exposes internal terminology
|
||||
**Fix example:** `format!("Invalid digest algorithm '{}'. Use 'sha256' or 'md5'", digest_name)`
|
||||
|
||||
### 10. Code Organization
|
||||
**Files affected:** `src/modes/save.rs`, `src/modes/diff.rs`
|
||||
**Functions affected:** `mode_save()`, `mode_diff()`
|
||||
**Problem example:** Large functions doing multiple responsibilities
|
||||
**Fix example:** Split into smaller functions:
|
||||
- `src/modes/save.rs: mode_save()` → `setup_compression_and_plugins()`, `process_input_stream()`, `finalize_meta_plugins()`, `save_item_to_database()`
|
||||
- `src/modes/diff.rs: mode_diff()` → `validate_diff_args()`, `setup_diff_pipes()`, `spawn_writer_threads()`, `execute_diff_command()`, `handle_diff_output()`
|
||||
- `src/modes/diff.rs: write_item_to_pipe()` → `open_item_reader()`, `copy_item_data()`
|
||||
|
||||
|
||||
|
||||
16
README.md
16
README.md
@@ -0,0 +1,16 @@
|
||||
# Keep - Temporary File Management with Compression and Metadata
|
||||
|
||||
Keep is a command-line tool for managing temporary files with automatic compression, metadata generation, and querying capabilities. It supports various compression algorithms and metadata plugins for rich item inspection.
|
||||
|
||||
## Features
|
||||
|
||||
- **Store and Retrieve**: Save content with automatic compression and retrieve by ID or tags.
|
||||
- **Compression Support**: Built-in support for LZ4, GZip, and more via external programs (BZip2, XZ, ZStd).
|
||||
- **Metadata Plugins**: Automatic extraction of file type, digests, hostname, user info, and custom metadata.
|
||||
- **Filtering**: Apply filters (head, tail, grep, etc.) when retrieving content.
|
||||
- **Querying**: List, search, and diff items with flexible formatting.
|
||||
- **REST API Server**: Optional HTTP server for programmatic access.
|
||||
- **Modular Design**: Extensible via plugins for compression, metadata, and filtering.
|
||||
|
||||
## Installation
|
||||
|
||||
|
||||
@@ -14,3 +14,4 @@ set mydir [ file normalize $mydir_base ]
|
||||
module-whatis Keep
|
||||
|
||||
prepend-path PATH $mydir/bin
|
||||
setenv KEEP_BASH_PROFILE ${mydir}/profile.bash
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
function __keep_preexec {
|
||||
KEEP_META_command="$1"
|
||||
KEEP_META_directory=${KEEP_META_directory:-${PWD}}
|
||||
KEEP_META_hostname=${KEEP_META_hostname:-${HOSTNAME:-$(hostname -f)}}
|
||||
KEEP_META_tty=${KEEP_META_tty:-$(tty)}
|
||||
}
|
||||
|
||||
@@ -20,10 +18,6 @@ function __keep_preexec_init {
|
||||
if [[ $found = false ]]; then
|
||||
preexec_functions+=(__keep_preexec)
|
||||
fi
|
||||
|
||||
if [[ -z $KEEP_LIST_FORMAT ]]; then
|
||||
export KEEP_LIST_FORMAT="id,time,size,tags,meta:hostname,meta:command"
|
||||
fi
|
||||
}
|
||||
|
||||
function keep {
|
||||
@@ -32,8 +26,6 @@ function keep {
|
||||
export KEEP_META_command
|
||||
fi
|
||||
|
||||
export KEEP_META_directory
|
||||
export KEEP_META_hostname
|
||||
export KEEP_META_tty
|
||||
|
||||
exec keep "$@"
|
||||
|
||||
191
src/args.rs
Normal file
191
src/args.rs
Normal file
@@ -0,0 +1,191 @@
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use clap::*;
|
||||
|
||||
/// Main struct for command-line arguments, parsed via Clap.
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[command(author, version, about, long_about = None)]
|
||||
pub struct Args {
|
||||
#[command(flatten)]
|
||||
pub mode: ModeArgs,
|
||||
#[command(flatten)]
|
||||
pub item: ItemArgs,
|
||||
#[command(flatten)]
|
||||
pub options: OptionsArgs,
|
||||
|
||||
#[arg(help("A list of either item IDs or tags"))]
|
||||
#[arg(value_parser = clap::value_parser!(NumberOrString))]
|
||||
#[arg(required = false)]
|
||||
pub ids_or_tags: Vec<NumberOrString>,
|
||||
}
|
||||
|
||||
/// Struct for mode-specific arguments, defining CLI flags for different operations.
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
pub struct ModeArgs {
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["get", "diff", "list", "delete", "info", "status"]))]
|
||||
#[arg(help("Save an item using any tags or metadata provided"))]
|
||||
pub save: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "diff", "list", "delete", "info", "status"]))]
|
||||
#[arg(help(
|
||||
"Get an item either by it's ID or by a combination of matching tags and metatdata"
|
||||
))]
|
||||
pub get: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), long, conflicts_with_all(["save", "get", "list", "delete", "info", "status"]))]
|
||||
#[arg(help("Show a diff between two items by ID"))]
|
||||
pub diff: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "get", "diff", "delete", "info", "status"]))]
|
||||
#[arg(help("List items, filtering on tags or metadata if given"))]
|
||||
pub list: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "get", "diff", "list", "info", "status"]))]
|
||||
#[arg(help("Delete items either by ID or by matching tags"))]
|
||||
#[arg(requires = "ids_or_tags")]
|
||||
pub delete: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "get", "diff", "list", "delete", "status"]))]
|
||||
#[arg(help(
|
||||
"Get an item either by it's ID or by a combination of matching tags and metatdata"
|
||||
))]
|
||||
pub info: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short('S'), long, conflicts_with_all(["save", "get", "diff", "list", "delete", "info", "server", "status_plugins"]))]
|
||||
#[arg(help("Show status of directories and supported compression algorithms"))]
|
||||
pub status: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), long, conflicts_with_all(["save", "get", "diff", "list", "delete", "info", "status", "server"]))]
|
||||
#[arg(help("Show available plugins and their configurations"))]
|
||||
pub status_plugins: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), long, conflicts_with_all(["save", "get", "diff", "list", "delete", "info", "status"]))]
|
||||
#[arg(help("Start REST HTTP server"))]
|
||||
pub server: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), long, conflicts_with_all(["save", "get", "diff", "list", "delete", "info", "status", "server"]))]
|
||||
#[arg(help("Generate default configuration and output to stdout"))]
|
||||
pub generate_config: bool,
|
||||
|
||||
#[arg(help_heading("Server Options"), long, env("KEEP_SERVER_ADDRESS"))]
|
||||
#[arg(help("Server address to bind to"))]
|
||||
pub server_address: Option<String>,
|
||||
|
||||
#[arg(help_heading("Server Options"), long, env("KEEP_SERVER_PORT"))]
|
||||
#[arg(help("Server port to bind to"))]
|
||||
pub server_port: Option<u16>,
|
||||
}
|
||||
|
||||
/// Struct for item-specific arguments, such as compression and plugins.
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
pub struct ItemArgs {
|
||||
#[arg(help_heading("Item Options"), short, long, env("KEEP_COMPRESSION"))]
|
||||
#[arg(help("Compression algorithm to use when saving items"))]
|
||||
pub compression: Option<String>,
|
||||
|
||||
#[arg(
|
||||
help_heading("Item Options"),
|
||||
short('M'),
|
||||
long,
|
||||
env("KEEP_META_PLUGINS")
|
||||
)]
|
||||
#[arg(help("Meta plugins to use when saving items"))]
|
||||
pub meta_plugins: Vec<String>,
|
||||
|
||||
#[arg(help_heading("Item Options"), long, env("KEEP_FILTERS"))]
|
||||
#[arg(help("Filter string to apply to content when getting items"))]
|
||||
pub filters: Option<String>,
|
||||
}
|
||||
|
||||
/// Struct for general options, including verbosity, paths, and output settings.
|
||||
#[derive(Parser, Debug, Default, Clone)]
|
||||
pub struct OptionsArgs {
|
||||
#[arg(long, env("KEEP_CONFIG"))]
|
||||
#[arg(help("Specify the configuration file to use"))]
|
||||
pub config: Option<PathBuf>,
|
||||
|
||||
#[arg(long, env("KEEP_DIR"))]
|
||||
#[arg(help("Specify the directory to use for storage"))]
|
||||
pub dir: Option<PathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
env("KEEP_LIST_FORMAT"),
|
||||
default_value("id,time,size,tags,meta:hostname")
|
||||
)]
|
||||
#[arg(help("A comma separated list of columns to display with --list"))]
|
||||
pub list_format: String,
|
||||
|
||||
#[arg(short('H'), long)]
|
||||
#[arg(help("Display file sizes with units"))]
|
||||
pub human_readable: bool,
|
||||
|
||||
#[arg(short, long, action = clap::ArgAction::Count, conflicts_with("quiet"))]
|
||||
#[arg(help("Increase message verbosity, can be given more than once"))]
|
||||
pub verbose: u8,
|
||||
|
||||
#[arg(short, long)]
|
||||
#[arg(help("Do not show any messages"))]
|
||||
pub quiet: bool,
|
||||
|
||||
#[arg(long, value_enum, default_value("table"))]
|
||||
#[arg(help("Output format (only works with --info, --status, --list)"))]
|
||||
pub output_format: Option<String>,
|
||||
|
||||
#[arg(long, env("KEEP_SERVER_PASSWORD"))]
|
||||
#[arg(help("Password for server authentication (requires --server)"))]
|
||||
pub server_password: Option<String>,
|
||||
|
||||
#[arg(long, env("KEEP_SERVER_PASSWORD_HASH"))]
|
||||
#[arg(help("Password hash for server authentication (requires --server)"))]
|
||||
pub server_password_hash: Option<String>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help("Force output even when binary data would be sent to a TTY")
|
||||
)]
|
||||
pub force: bool,
|
||||
}
|
||||
|
||||
/// Enum for representing either a number (item ID) or a string (tag).
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum NumberOrString {
|
||||
Number(i64),
|
||||
Str(String),
|
||||
}
|
||||
|
||||
impl FromStr for NumberOrString {
|
||||
type Err = Error;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Ok(s.parse::<i64>()
|
||||
.map(NumberOrString::Number)
|
||||
.unwrap_or_else(|_| NumberOrString::Str(s.to_string())))
|
||||
}
|
||||
}
|
||||
|
||||
/// Validates the parsed arguments based on mode constraints.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Result<(), String>` - Ok if valid, or an error message string.
|
||||
impl Args {
|
||||
/// Validate the arguments based on the selected mode
|
||||
pub fn validate(&self) -> Result<(), String> {
|
||||
// Check if --delete is used and ids_or_tags is empty
|
||||
if self.mode.delete && self.ids_or_tags.is_empty() {
|
||||
return Err("At least one ID is required when using --delete".to_string());
|
||||
}
|
||||
|
||||
// Check if --delete is used and any of the ids_or_tags are tags (strings)
|
||||
if self.mode.delete {
|
||||
for item in &self.ids_or_tags {
|
||||
if let NumberOrString::Str(_) = item {
|
||||
return Err("Tags are not supported for --delete, only IDs".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
130
src/common/binary_detection.rs
Normal file
130
src/common/binary_detection.rs
Normal file
@@ -0,0 +1,130 @@
|
||||
use crate::services::async_item_service::AsyncItemService;
|
||||
use crate::services::error::CoreError;
|
||||
use axum::http::StatusCode;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Check if content is binary when allow_binary is false
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_service` - Reference to the async item service
|
||||
/// * `item_id` - The ID of the item to check
|
||||
/// * `metadata` - Metadata associated with the item
|
||||
/// * `allow_binary` - Whether binary content is allowed
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(), StatusCode>` -
|
||||
/// * `Ok(())` if binary content is allowed or content is not binary
|
||||
/// * `Err(StatusCode::BAD_REQUEST)` if binary content is not allowed and content is binary
|
||||
/// Check if content is binary when allow_binary is false
|
||||
///
|
||||
/// Validates whether binary content is permitted for the item. If not allowed and content
|
||||
/// is detected as binary, returns a bad request status. Uses metadata or streams content
|
||||
/// for detection if needed.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_service` - Reference to the async item service for content access.
|
||||
/// * `item_id` - The ID of the item to check.
|
||||
/// * `metadata` - Metadata associated with the item (checked for "text" key).
|
||||
/// * `allow_binary` - Whether binary content is allowed (bypasses check if true).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(), StatusCode>` -
|
||||
/// * `Ok(())` if binary content is allowed or content is not binary.
|
||||
/// * `Err(StatusCode::BAD_REQUEST)` if binary content is not allowed and content is binary.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Propagates `StatusCode` for validation failures.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// // If allow_binary = false and content is text
|
||||
/// check_binary_content_allowed(&service, 1, &metadata, false)?;
|
||||
/// // Succeeds
|
||||
///
|
||||
/// // If allow_binary = false and content is binary
|
||||
/// // Returns Err(StatusCode::BAD_REQUEST)
|
||||
/// ```
|
||||
pub async fn check_binary_content_allowed(
|
||||
item_service: &AsyncItemService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
allow_binary: bool,
|
||||
) -> Result<(), StatusCode> {
|
||||
if !allow_binary {
|
||||
let is_binary = is_content_binary(item_service, item_id, metadata).await?;
|
||||
if is_binary {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Helper function to determine if content is binary
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_service` - Reference to the async item service
|
||||
/// * `item_id` - The ID of the item to check
|
||||
/// * `metadata` - Metadata associated with the item
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<bool, StatusCode>` -
|
||||
/// * `Ok(true)` if content is binary
|
||||
/// * `Ok(false)` if content is text
|
||||
/// * `Err(StatusCode)` if an error occurs during checking
|
||||
/// Helper function to determine if content is binary
|
||||
///
|
||||
/// Checks existing "text" metadata first; if absent or unset, streams and analyzes
|
||||
/// the content to detect binary nature. Logs warnings on detection failures.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_service` - Reference to the async item service for content access.
|
||||
/// * `item_id` - The ID of the item to check.
|
||||
/// * `metadata` - Metadata associated with the item (checked for "text" key).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<bool, StatusCode>` -
|
||||
/// * `Ok(true)` if content is binary.
|
||||
/// * `Ok(false)` if content is text.
|
||||
/// * `Err(StatusCode)` if an error occurs during checking (e.g., INTERNAL_SERVER_ERROR).
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `StatusCode::INTERNAL_SERVER_ERROR` if content access fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let is_bin = is_content_binary(&service, 1, &metadata).await?;
|
||||
/// assert!(is_bin == false); // For text content
|
||||
/// ```
|
||||
pub async fn is_content_binary(
|
||||
item_service: &AsyncItemService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
) -> Result<bool, StatusCode> {
|
||||
if let Some(text_val) = metadata.get("text") {
|
||||
Ok(text_val == "false")
|
||||
} else {
|
||||
// If text metadata isn't set, we need to check the content using streaming approach
|
||||
match item_service.get_item_content_info_streaming(
|
||||
item_id,
|
||||
None
|
||||
).await {
|
||||
Ok((_, _, is_binary)) => Ok(is_binary),
|
||||
Err(e) => {
|
||||
log::warn!("Failed to get content info for binary check for item {}: {}", item_id, e);
|
||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,3 @@
|
||||
use std::io::Read;
|
||||
|
||||
/// Detect if data is binary or text
|
||||
/// Returns true if data is likely binary, false if likely text
|
||||
pub fn is_binary(data: &[u8]) -> bool {
|
||||
@@ -13,11 +11,11 @@ pub fn is_binary(data: &[u8]) -> bool {
|
||||
}
|
||||
|
||||
// Check for UTF-16 BOM (text)
|
||||
if data.len() >= 2 {
|
||||
if (data[0] == 0xFF && data[1] == 0xFE) || (data[0] == 0xFE && data[1] == 0xFF) {
|
||||
if data.len() >= 2
|
||||
&& ((data[0] == 0xFF && data[1] == 0xFE) || (data[0] == 0xFE && data[1] == 0xFF))
|
||||
{
|
||||
return false; // UTF-16 with BOM is text
|
||||
}
|
||||
}
|
||||
|
||||
// Check for UTF-8 BOM (text)
|
||||
if data.len() >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF {
|
||||
@@ -60,7 +58,6 @@ fn has_binary_signature(data: &[u8]) -> bool {
|
||||
(&[0x4D, 0x4D, 0x00, 0x2A], 4), // TIFF (big endian)
|
||||
(&[0x52, 0x49, 0x46, 0x46], 4), // WebP (RIFF container)
|
||||
(&[0x00, 0x00, 0x00, 0x0C, 0x6A, 0x50, 0x20, 0x20], 8), // JPEG 2000
|
||||
|
||||
// Audio/Video formats
|
||||
(&[0x49, 0x44, 0x33], 3), // MP3 with ID3v2
|
||||
(&[0xFF, 0xFB], 2), // MP3
|
||||
@@ -71,7 +68,6 @@ fn has_binary_signature(data: &[u8]) -> bool {
|
||||
(&[0x52, 0x49, 0x46, 0x46], 4), // WAV/AVI (RIFF)
|
||||
(&[0x46, 0x4C, 0x56], 3), // FLV
|
||||
(&[0x1A, 0x45, 0xDF, 0xA3], 4), // MKV/WebM
|
||||
|
||||
// Archive formats
|
||||
(&[0x50, 0x4B, 0x03, 0x04], 4), // ZIP
|
||||
(&[0x50, 0x4B, 0x05, 0x06], 4), // ZIP (empty)
|
||||
@@ -86,13 +82,11 @@ fn has_binary_signature(data: &[u8]) -> bool {
|
||||
(&[0x1F, 0x9D], 2), // LZW compressed
|
||||
(&[0x1F, 0xA0], 2), // LZH compressed
|
||||
(&[0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C], 6), // 7-Zip
|
||||
|
||||
// Document formats
|
||||
(&[0x25, 0x50, 0x44, 0x46], 4), // PDF
|
||||
(&[0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1], 8), // MS Office (OLE)
|
||||
(&[0x50, 0x4B, 0x03, 0x04], 4), // Office Open XML (also ZIP)
|
||||
(&[0x7B, 0x5C, 0x72, 0x74, 0x66], 5), // RTF
|
||||
|
||||
// Executables and object files
|
||||
(&[0x7F, 0x45, 0x4C, 0x46], 4), // ELF
|
||||
(&[0x4D, 0x5A], 2), // Windows PE/DOS
|
||||
@@ -103,26 +97,34 @@ fn has_binary_signature(data: &[u8]) -> bool {
|
||||
(&[0xCF, 0xFA, 0xED, 0xFE], 4), // Mach-O 64-bit (big endian)
|
||||
(&[0xCA, 0xFE, 0xBA, 0xBE], 4), // Java class file
|
||||
(&[0xDE, 0xC0, 0x17, 0x0B], 4), // Dalvik executable
|
||||
|
||||
// Database formats
|
||||
(&[0x53, 0x51, 0x4C, 0x69, 0x74, 0x65, 0x20, 0x66, 0x6F, 0x72, 0x6D, 0x61, 0x74, 0x20, 0x33, 0x00], 16), // SQLite
|
||||
(
|
||||
&[
|
||||
0x53, 0x51, 0x4C, 0x69, 0x74, 0x65, 0x20, 0x66, 0x6F, 0x72, 0x6D, 0x61, 0x74, 0x20,
|
||||
0x33, 0x00,
|
||||
],
|
||||
16,
|
||||
), // SQLite
|
||||
(&[0x00, 0x01, 0x00, 0x00], 4), // Palm Database
|
||||
|
||||
// Font formats
|
||||
(&[0x00, 0x01, 0x00, 0x00, 0x00], 5), // TrueType
|
||||
(&[0x4F, 0x54, 0x54, 0x4F], 4), // OpenType
|
||||
(&[0x77, 0x4F, 0x46, 0x46], 4), // WOFF
|
||||
(&[0x77, 0x4F, 0x46, 0x32], 4), // WOFF2
|
||||
|
||||
// Virtual machine formats
|
||||
(&[0x76, 0x6D, 0x64, 0x6B], 4), // VMDK
|
||||
(&[0x3C, 0x3C, 0x3C, 0x20, 0x4F, 0x72, 0x61, 0x63, 0x6C, 0x65, 0x20, 0x56, 0x4D, 0x20, 0x56, 0x69, 0x72, 0x74, 0x75, 0x61, 0x6C, 0x42, 0x6F, 0x78, 0x20, 0x44, 0x69, 0x73, 0x6B, 0x20, 0x49, 0x6D, 0x61, 0x67, 0x65, 0x20, 0x3E, 0x3E, 0x3E], 39), // VirtualBox VDI
|
||||
|
||||
(
|
||||
&[
|
||||
0x3C, 0x3C, 0x3C, 0x20, 0x4F, 0x72, 0x61, 0x63, 0x6C, 0x65, 0x20, 0x56, 0x4D, 0x20,
|
||||
0x56, 0x69, 0x72, 0x74, 0x75, 0x61, 0x6C, 0x42, 0x6F, 0x78, 0x20, 0x44, 0x69, 0x73,
|
||||
0x6B, 0x20, 0x49, 0x6D, 0x61, 0x67, 0x65, 0x20, 0x3E, 0x3E, 0x3E,
|
||||
],
|
||||
39,
|
||||
), // VirtualBox VDI
|
||||
// Disk image formats
|
||||
(&[0xEB, 0x3C, 0x90], 3), // FAT12/16/32
|
||||
(&[0xEB, 0x58, 0x90], 3), // FAT32
|
||||
(&[0x55, 0xAA], 2), // Boot sector (at offset 510)
|
||||
|
||||
// Other binary formats
|
||||
(&[0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A], 8), // AR archive
|
||||
(&[0x78, 0x01], 2), // zlib (default compression)
|
||||
@@ -151,18 +153,30 @@ fn looks_like_utf16(data: &[u8]) -> bool {
|
||||
return false;
|
||||
}
|
||||
|
||||
let mut zero_count = 0;
|
||||
let pairs = data.len() / 2;
|
||||
// Check if it could be UTF-16 by looking at null patterns
|
||||
let mut null_pairs = 0;
|
||||
let max_checks = std::cmp::min(data.len() / 2, 50); // Check up to 50 character pairs
|
||||
|
||||
// Check if every other byte is zero (indicating UTF-16)
|
||||
for i in 0..pairs {
|
||||
for i in 0..max_checks {
|
||||
if data[i * 2 + 1] == 0 {
|
||||
zero_count += 1;
|
||||
null_pairs += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// If more than 50% of odd positions are zero, might be UTF-16
|
||||
zero_count as f64 / pairs as f64 > 0.5
|
||||
// If most high bytes are zero, it's likely UTF-16
|
||||
if max_checks > 0 && null_pairs as f64 / max_checks as f64 > 0.7 {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Also check the reverse pattern (little-endian UTF-16)
|
||||
let mut null_pairs_reverse = 0;
|
||||
for i in 0..max_checks {
|
||||
if i * 2 + 1 < data.len() && data[i * 2] == 0 {
|
||||
null_pairs_reverse += 1;
|
||||
}
|
||||
}
|
||||
|
||||
null_pairs_reverse as f64 / max_checks as f64 > 0.7
|
||||
}
|
||||
|
||||
/// Check if data looks like a TAR archive
|
||||
@@ -185,8 +199,8 @@ fn looks_like_tar(data: &[u8]) -> bool {
|
||||
}
|
||||
|
||||
// Check checksum field (should be octal digits or spaces)
|
||||
for i in 148..156 {
|
||||
if data[i] != 0 && (data[i] < b'0' || data[i] > b'7') && data[i] != b' ' {
|
||||
for &b in &data[148..156] {
|
||||
if b != 0 && (b < b'0' || b > b'7') && b != b' ' {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -200,18 +214,18 @@ fn looks_like_tar(data: &[u8]) -> bool {
|
||||
}
|
||||
|
||||
// Additional heuristic: check if the structure looks reasonable
|
||||
let has_reasonable_structure =
|
||||
data[0] != 0 && // Filename starts
|
||||
data[100..108].iter().all(|&b| b == 0 || (b >= b'0' && b <= b'7') || b == b' '); // Mode field
|
||||
// Mode field
|
||||
|
||||
has_reasonable_structure
|
||||
data[0] != 0 && // Filename starts
|
||||
data[100..108].iter().all(|&b| b == 0 || (b'0'..=b'7').contains(&b) || b == b' ')
|
||||
}
|
||||
|
||||
/// Calculate the ratio of printable characters in the data
|
||||
fn calculate_printable_ratio(data: &[u8]) -> f64 {
|
||||
let printable_count = data.iter().filter(|&&b| {
|
||||
b.is_ascii_graphic() || b.is_ascii_whitespace()
|
||||
}).count();
|
||||
let printable_count = data
|
||||
.iter()
|
||||
.filter(|&&b| b.is_ascii_graphic() || b.is_ascii_whitespace())
|
||||
.count();
|
||||
|
||||
printable_count as f64 / data.len() as f64
|
||||
}
|
||||
7
src/common/mod.rs
Normal file
7
src/common/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
pub mod is_binary;
|
||||
|
||||
/// Detects if data is binary or text based on signatures and printable ratios.
|
||||
pub mod status;
|
||||
|
||||
/// Standard buffer size for I/O operations (8KB)
|
||||
pub const PIPESIZE: usize = 8192;
|
||||
188
src/common/status.rs
Normal file
188
src/common/status.rs
Normal file
@@ -0,0 +1,188 @@
|
||||
use std::path::PathBuf;
|
||||
use strum::IntoEnumIterator;
|
||||
#[cfg(feature = "server")]
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::compression_engine::{CompressionType, get_compression_engine};
|
||||
use crate::meta_plugin::MetaPluginType;
|
||||
|
||||
use crate::filter_plugin::FilterOption;
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize, Clone)]
|
||||
#[cfg_attr(feature = "server", derive(ToSchema))]
|
||||
pub struct FilterPluginInfo {
|
||||
pub name: String,
|
||||
pub options: Vec<FilterOption>,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
#[cfg_attr(feature = "server", derive(ToSchema))]
|
||||
pub struct StatusInfo {
|
||||
pub paths: PathInfo,
|
||||
pub compression: Vec<CompressionInfo>,
|
||||
pub meta_plugins: std::collections::HashMap<String, MetaPluginInfo>,
|
||||
pub enabled_meta_plugins: Vec<String>,
|
||||
pub filter_plugins: Vec<FilterPluginInfo>,
|
||||
pub configured_meta_plugins: Option<Vec<crate::config::MetaPluginConfig>>,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
#[cfg_attr(feature = "server", derive(ToSchema))]
|
||||
pub struct PathInfo {
|
||||
pub data: String,
|
||||
pub database: String,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
pub struct CompressionInfo {
|
||||
#[serde(rename = "type")]
|
||||
pub compression_type: String,
|
||||
pub found: bool,
|
||||
pub default: bool,
|
||||
pub binary: String,
|
||||
pub compress: String,
|
||||
pub decompress: String,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize, Clone)]
|
||||
#[cfg_attr(feature = "server", derive(ToSchema))]
|
||||
pub struct MetaPluginInfo {
|
||||
pub meta_name: String,
|
||||
pub outputs: std::collections::HashMap<String, serde_yaml::Value>,
|
||||
pub options: std::collections::HashMap<String, serde_yaml::Value>,
|
||||
}
|
||||
|
||||
pub fn generate_status_info(
|
||||
data_path: PathBuf,
|
||||
db_path: PathBuf,
|
||||
enabled_meta_plugins: &[MetaPluginType],
|
||||
enabled_compression_type: Option<CompressionType>,
|
||||
) -> StatusInfo {
|
||||
log::debug!("STATUS: Starting status info generation");
|
||||
let path_info = PathInfo {
|
||||
data: data_path
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.expect("Unable to convert data path to string"),
|
||||
database: db_path
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.expect("Unable to convert DB path to string"),
|
||||
};
|
||||
|
||||
let _default_type = crate::compression_engine::default_compression_type();
|
||||
let mut compression_info = Vec::new();
|
||||
|
||||
// Sort compression types by their string representation
|
||||
let mut sorted_compression_types: Vec<CompressionType> = CompressionType::iter().collect();
|
||||
sorted_compression_types.sort_by_key(|ct| ct.to_string());
|
||||
|
||||
for compression_type in sorted_compression_types {
|
||||
let (binary, compress, decompress, supported) =
|
||||
match get_compression_engine(compression_type.clone()) {
|
||||
Ok(engine) => {
|
||||
let supp = engine.is_supported();
|
||||
if supp && engine.is_internal() {
|
||||
(
|
||||
"<INTERNAL>".to_string(),
|
||||
"".to_string(),
|
||||
"".to_string(),
|
||||
supp,
|
||||
)
|
||||
} else if supp {
|
||||
let (b, c, d) = engine.get_status_info();
|
||||
(b, c, d, supp)
|
||||
} else {
|
||||
(
|
||||
"<UNSUPPORTED>".to_string(),
|
||||
"".to_string(),
|
||||
"".to_string(),
|
||||
supp,
|
||||
)
|
||||
}
|
||||
}
|
||||
Err(_) => (
|
||||
"<UNSUPPORTED>".to_string(),
|
||||
"".to_string(),
|
||||
"".to_string(),
|
||||
false,
|
||||
),
|
||||
};
|
||||
|
||||
let is_enabled = enabled_compression_type
|
||||
.as_ref()
|
||||
.is_some_and(|ct| *ct == compression_type);
|
||||
|
||||
compression_info.push(CompressionInfo {
|
||||
compression_type: compression_type.to_string(),
|
||||
found: supported,
|
||||
default: is_enabled,
|
||||
binary,
|
||||
compress,
|
||||
decompress,
|
||||
});
|
||||
}
|
||||
|
||||
let mut meta_plugins_map = std::collections::HashMap::new();
|
||||
let mut enabled_meta_plugins_vec = Vec::new();
|
||||
|
||||
// Sort meta plugin types by their string representation to avoid creating plugins just for sorting
|
||||
let mut sorted_meta_plugins: Vec<MetaPluginType> = MetaPluginType::iter().collect();
|
||||
sorted_meta_plugins.sort_by_key(|meta_plugin_type| meta_plugin_type.to_string());
|
||||
|
||||
for meta_plugin_type in sorted_meta_plugins {
|
||||
log::debug!(
|
||||
"STATUS: Processing meta plugin type: {:?}",
|
||||
meta_plugin_type
|
||||
);
|
||||
log::debug!("STATUS: About to call get_meta_plugin");
|
||||
let meta_plugin = crate::meta_plugin::get_meta_plugin(meta_plugin_type.clone(), None, None);
|
||||
log::debug!("STATUS: Created meta plugin instance");
|
||||
|
||||
// Get meta name first to avoid borrowing issues
|
||||
log::debug!("STATUS: Getting meta name...");
|
||||
let meta_name = meta_plugin.meta_type().to_string();
|
||||
log::debug!("STATUS: Got meta name: {}", meta_name);
|
||||
|
||||
// Check if this plugin is enabled
|
||||
let is_enabled = enabled_meta_plugins.contains(&meta_plugin_type);
|
||||
if is_enabled {
|
||||
enabled_meta_plugins_vec.push(meta_name.clone());
|
||||
}
|
||||
|
||||
// Create a display of outputs for status - use configured outputs if available, otherwise defaults
|
||||
let outputs_display = if meta_plugin.outputs().is_empty() {
|
||||
// No configured outputs, use defaults
|
||||
let mut default_outputs = std::collections::HashMap::new();
|
||||
for output_name in meta_plugin.default_outputs() {
|
||||
default_outputs.insert(output_name.clone(), serde_yaml::Value::String(output_name));
|
||||
}
|
||||
default_outputs
|
||||
} else {
|
||||
// Use configured outputs
|
||||
meta_plugin.outputs().clone()
|
||||
};
|
||||
|
||||
// Get options
|
||||
let options = meta_plugin.options().clone();
|
||||
|
||||
meta_plugins_map.insert(
|
||||
meta_name.clone(),
|
||||
MetaPluginInfo {
|
||||
meta_name,
|
||||
outputs: outputs_display,
|
||||
options,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
StatusInfo {
|
||||
paths: path_info,
|
||||
compression: compression_info,
|
||||
meta_plugins: meta_plugins_map,
|
||||
enabled_meta_plugins: enabled_meta_plugins_vec,
|
||||
filter_plugins: Vec::new(),
|
||||
configured_meta_plugins: None,
|
||||
}
|
||||
}
|
||||
@@ -1,121 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use std::io;
|
||||
use std::io::{Read, Write};
|
||||
use std::path::PathBuf;
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use log::*;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
extern crate enum_map;
|
||||
use enum_map::enum_map;
|
||||
use enum_map::{Enum, EnumMap};
|
||||
|
||||
pub mod gzip;
|
||||
pub mod lz4;
|
||||
pub mod none;
|
||||
pub mod program;
|
||||
|
||||
use crate::compression_engine::gzip::CompressionEngineGZip;
|
||||
use crate::compression_engine::lz4::CompressionEngineLZ4;
|
||||
use crate::compression_engine::none::CompressionEngineNone;
|
||||
use crate::compression_engine::program::CompressionEngineProgram;
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone, strum::EnumIter, strum::Display, strum::EnumString, Enum)]
|
||||
#[strum(ascii_case_insensitive)]
|
||||
pub enum CompressionType {
|
||||
LZ4,
|
||||
GZip,
|
||||
BZip2,
|
||||
XZ,
|
||||
ZStd,
|
||||
None,
|
||||
}
|
||||
|
||||
pub trait CompressionEngine {
|
||||
fn open(&self, file_path: PathBuf) -> Result<Box<dyn Read>>;
|
||||
fn create(&self, file_path: PathBuf) -> Result<Box<dyn Write>>;
|
||||
|
||||
fn is_supported(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn copy(&self, file_path: PathBuf, writer: &mut dyn Write) -> Result<()> {
|
||||
let mut reader = self.open(file_path)?;
|
||||
io::copy(&mut reader, writer)?;
|
||||
writer.flush()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cat(&self, file_path: PathBuf) -> Result<()> {
|
||||
let mut stdout = io::stdout().lock();
|
||||
self.copy(file_path, &mut stdout)
|
||||
}
|
||||
|
||||
fn size(&self, file_path: PathBuf) -> Result<usize> {
|
||||
let mut reader = self.open(file_path)?;
|
||||
let mut buffer = [0; libc::BUFSIZ as usize];
|
||||
let mut size: usize = 0;
|
||||
|
||||
loop {
|
||||
let n = reader.read(&mut buffer[..libc::BUFSIZ as usize])?;
|
||||
if n == 0 {
|
||||
debug!("COMPRESSION: EOF");
|
||||
break;
|
||||
}
|
||||
|
||||
size += n;
|
||||
}
|
||||
|
||||
Ok(size)
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
pub static ref COMPRESSION_PROGRAMS: EnumMap<CompressionType, Option<CompressionEngineProgram>> = enum_map! {
|
||||
CompressionType::LZ4 => None,
|
||||
CompressionType::GZip => None,
|
||||
CompressionType::BZip2 => {
|
||||
let program = CompressionEngineProgram::new("bzip2", vec!["-qcf"], vec!["-dcf"]);
|
||||
if program.supported { Some(program) } else { None }
|
||||
},
|
||||
CompressionType::XZ => {
|
||||
let program = CompressionEngineProgram::new("xz", vec!["-qcf"], vec!["-dcf"]);
|
||||
if program.supported { Some(program) } else { None }
|
||||
},
|
||||
CompressionType::ZStd => {
|
||||
let program = CompressionEngineProgram::new("zstd", vec!["-qcf"], vec!["-dcf"]);
|
||||
if program.supported { Some(program) } else { None }
|
||||
},
|
||||
CompressionType::None => None
|
||||
};
|
||||
}
|
||||
|
||||
pub fn get_compression_engine(
|
||||
compression_type: CompressionType,
|
||||
) -> Result<Box<dyn CompressionEngine>> {
|
||||
match compression_type {
|
||||
CompressionType::LZ4 => Ok(Box::new(CompressionEngineLZ4::new())),
|
||||
CompressionType::GZip => Ok(Box::new(CompressionEngineGZip::new())),
|
||||
CompressionType::None => Ok(Box::new(CompressionEngineNone::new())),
|
||||
compression_type => Ok(Box::new(
|
||||
COMPRESSION_PROGRAMS[compression_type.clone()]
|
||||
.clone()
|
||||
.unwrap(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_compression_type() -> CompressionType {
|
||||
let mut default = CompressionType::None;
|
||||
for compression_type in CompressionType::iter() {
|
||||
let compression_engine =
|
||||
get_compression_engine(compression_type.clone()).expect("Missing engine");
|
||||
if compression_engine.is_supported() {
|
||||
default = compression_type;
|
||||
break;
|
||||
}
|
||||
}
|
||||
default
|
||||
}
|
||||
@@ -1,30 +1,47 @@
|
||||
#[cfg(feature = "gzip")]
|
||||
use anyhow::Result;
|
||||
#[cfg(feature = "gzip")]
|
||||
use log::*;
|
||||
#[cfg(feature = "gzip")]
|
||||
use std::fs::File;
|
||||
#[cfg(feature = "gzip")]
|
||||
use std::io;
|
||||
#[cfg(feature = "gzip")]
|
||||
use std::io::{Read, Write};
|
||||
#[cfg(feature = "gzip")]
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
use flate2::Compression;
|
||||
#[cfg(feature = "gzip")]
|
||||
use flate2::read::GzDecoder;
|
||||
#[cfg(feature = "gzip")]
|
||||
use flate2::write::GzEncoder;
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
use crate::compression_engine::CompressionEngine;
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
#[derive(Debug, Eq, PartialEq, Clone, Default)]
|
||||
pub struct CompressionEngineGZip {}
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
impl CompressionEngineGZip {
|
||||
pub fn new() -> CompressionEngineGZip {
|
||||
CompressionEngineGZip {}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
impl CompressionEngine for CompressionEngineGZip {
|
||||
fn is_supported(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn get_status_info(&self) -> (String, String, String) {
|
||||
("<INTERNAL>".to_string(), "".to_string(), "".to_string())
|
||||
}
|
||||
|
||||
fn open(&self, file_path: PathBuf) -> Result<Box<dyn Read>> {
|
||||
debug!("COMPRESSION: Opening {:?} using {:?}", file_path, *self);
|
||||
|
||||
@@ -33,19 +50,26 @@ impl CompressionEngine for CompressionEngineGZip {
|
||||
}
|
||||
|
||||
fn create(&self, file_path: PathBuf) -> Result<Box<dyn Write>> {
|
||||
debug!("COMPRESSION: Writting to {:?} using {:?}", file_path, *self);
|
||||
debug!("COMPRESSION: Writing to {:?} using {:?}", file_path, *self);
|
||||
|
||||
let file = File::create(file_path)?;
|
||||
let gzip_write = GzEncoder::new(file, Compression::default());
|
||||
|
||||
Ok(Box::new(AutoFinishGzEncoder::new(gzip_write)))
|
||||
}
|
||||
|
||||
fn clone_box(&self) -> Box<dyn CompressionEngine> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
#[derive(Debug)]
|
||||
pub struct AutoFinishGzEncoder<W: Write> {
|
||||
encoder: Option<GzEncoder<W>>,
|
||||
}
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
impl<W: Write> AutoFinishGzEncoder<W> {
|
||||
fn new(gz_encoder: GzEncoder<W>) -> AutoFinishGzEncoder<W> {
|
||||
AutoFinishGzEncoder {
|
||||
@@ -54,15 +78,19 @@ impl<W: Write> AutoFinishGzEncoder<W> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
impl<W: Write> Drop for AutoFinishGzEncoder<W> {
|
||||
fn drop(&mut self) {
|
||||
if let Some(encoder) = self.encoder.take() {
|
||||
debug!("COMPRESSION: Finishing");
|
||||
let _ = encoder.finish();
|
||||
if let Err(e) = encoder.finish() {
|
||||
warn!("Failed to finish GZip encoder: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
impl<W: Write> Write for AutoFinishGzEncoder<W> {
|
||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||
self.encoder.as_mut().unwrap().write(buf)
|
||||
|
||||
@@ -27,11 +27,15 @@ impl CompressionEngine for CompressionEngineLZ4 {
|
||||
}
|
||||
|
||||
fn create(&self, file_path: PathBuf) -> Result<Box<dyn Write>> {
|
||||
debug!("COMPRESSION: Writting to {:?} using {:?}", file_path, *self);
|
||||
debug!("COMPRESSION: Writing to {:?} using {:?}", file_path, *self);
|
||||
|
||||
let file = File::create(file_path)?;
|
||||
let lz4_write = FrameEncoder::new(file).auto_finish();
|
||||
|
||||
Ok(Box::new(lz4_write))
|
||||
}
|
||||
|
||||
fn clone_box(&self) -> Box<dyn CompressionEngine> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
229
src/compression_engine/mod.rs
Normal file
229
src/compression_engine/mod.rs
Normal file
@@ -0,0 +1,229 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use std::io;
|
||||
use std::io::{Read, Write};
|
||||
use std::path::PathBuf;
|
||||
use strum::IntoEnumIterator;
|
||||
use strum::{Display, EnumIter, EnumString};
|
||||
|
||||
use log::*;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
extern crate enum_map;
|
||||
use enum_map::enum_map;
|
||||
use enum_map::{Enum, EnumMap};
|
||||
|
||||
pub mod gzip;
|
||||
pub mod lz4;
|
||||
pub mod none;
|
||||
pub mod program;
|
||||
|
||||
use crate::compression_engine::program::CompressionEngineProgram;
|
||||
|
||||
/// Enum representing different compression types supported by the system.
|
||||
///
|
||||
/// This enum defines all supported compression formats that can be used for
|
||||
/// storing and retrieving compressed items. Each variant corresponds to a
|
||||
/// specific compression algorithm or no compression.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use keep::compression_engine::CompressionType;
|
||||
/// assert_eq!(CompressionType::GZip.to_string(), "gzip");
|
||||
/// ```
|
||||
#[derive(Debug, Eq, PartialEq, Clone, EnumIter, Display, EnumString, enum_map::Enum)]
|
||||
#[strum(ascii_case_insensitive)]
|
||||
pub enum CompressionType {
|
||||
LZ4,
|
||||
GZip,
|
||||
BZip2,
|
||||
XZ,
|
||||
ZStd,
|
||||
None,
|
||||
}
|
||||
|
||||
/// Trait defining the interface for compression engines.
|
||||
///
|
||||
/// This trait provides a unified API for different compression implementations.
|
||||
/// Implementors handle reading from and writing to compressed files, as well as
|
||||
/// utility operations like copying decompressed content or calculating sizes.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Methods may return `anyhow::Error` for I/O failures, unsupported formats,
|
||||
/// or invalid file paths.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```ignore
|
||||
/// // Example usage would depend on a concrete implementation
|
||||
/// use keep::compression_engine::CompressionEngine;
|
||||
/// let engine = /* some engine */;
|
||||
/// let reader = engine.open("file.gz".into()).unwrap();
|
||||
/// ```
|
||||
pub trait CompressionEngine: Send + Sync {
|
||||
/// Opens a compressed file for reading.
|
||||
///
|
||||
/// Creates a reader that transparently decompresses the file contents as they are read.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `file_path` - Path to the compressed file.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<Box<dyn Read>>` - A boxed reader that decompresses the file on read,
|
||||
/// or an error if the file cannot be opened or is invalid.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if the file does not exist, is not a valid compressed file,
|
||||
/// or if decompression fails.
|
||||
fn open(&self, file_path: PathBuf) -> Result<Box<dyn Read>>;
|
||||
|
||||
/// Creates a new compressed file for writing.
|
||||
///
|
||||
/// Creates a writer that transparently compresses data as it is written.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `file_path` - Path where the compressed file will be created.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<Box<dyn Write>>` - A boxed writer that compresses data on write,
|
||||
/// or an error if the file cannot be created.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if the path is invalid or if there are permission issues.
|
||||
fn create(&self, file_path: PathBuf) -> Result<Box<dyn Write>>;
|
||||
|
||||
/// Checks if this compression engine is supported on the current system.
|
||||
///
|
||||
/// Some compression types may require external programs or features to be enabled.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `bool` - True if supported, false otherwise.
|
||||
fn is_supported(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// Checks if this compression engine is internal (built-in) or external (program-based).
|
||||
///
|
||||
/// Internal engines use Rust implementations without external dependencies.
|
||||
/// External engines rely on system programs.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `bool` - True if internal, false if external.
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// Returns status information for this compression engine.
|
||||
///
|
||||
/// For internal engines, returns ("<INTERNAL>", "", "").
|
||||
/// For external program engines, returns (program_binary, compress_args, decompress_args).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A tuple of (binary, compress_command, decompress_command).
|
||||
fn get_status_info(&self) -> (String, String, String) {
|
||||
("<INTERNAL>".to_string(), "".to_string(), "".to_string())
|
||||
}
|
||||
|
||||
/// Copies decompressed content from a file to a writer.
|
||||
///
|
||||
/// Reads the compressed file and writes the decompressed content to the provided writer.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `file_path` - Path to the compressed file.
|
||||
/// * `writer` - Writer to receive decompressed content.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<()>` - Success if the copy completes, or an error.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Propagates errors from opening the file or copying data.
|
||||
fn copy(&self, file_path: PathBuf, writer: &mut dyn Write) -> Result<()> {
|
||||
let mut reader = self.open(file_path)?;
|
||||
io::copy(&mut reader, writer)?;
|
||||
writer.flush()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this compression engine into a new boxed instance.
|
||||
///
|
||||
/// Required for dynamic trait object cloning.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn CompressionEngine>` clone of this engine.
|
||||
fn clone_box(&self) -> Box<dyn CompressionEngine>;
|
||||
}
|
||||
|
||||
impl Clone for Box<dyn CompressionEngine> {
|
||||
fn clone(&self) -> Self {
|
||||
self.as_ref().clone_box()
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref COMPRESSION_ENGINES: EnumMap<CompressionType, Box<dyn CompressionEngine>> = {
|
||||
let mut em = enum_map! {
|
||||
CompressionType::LZ4 => Box::new(crate::compression_engine::lz4::CompressionEngineLZ4::new()) as Box<dyn CompressionEngine>,
|
||||
CompressionType::GZip => Box::new(crate::compression_engine::program::CompressionEngineProgram::new(
|
||||
"gzip",
|
||||
vec!["-c"],
|
||||
vec!["-d", "-c"]
|
||||
)) as Box<dyn CompressionEngine>,
|
||||
CompressionType::BZip2 => Box::new(crate::compression_engine::program::CompressionEngineProgram::new(
|
||||
"bzip2",
|
||||
vec!["-c"],
|
||||
vec!["-d", "-c"]
|
||||
)) as Box<dyn CompressionEngine>,
|
||||
CompressionType::XZ => Box::new(crate::compression_engine::program::CompressionEngineProgram::new(
|
||||
"xz",
|
||||
vec!["-c"],
|
||||
vec!["-d", "-c"]
|
||||
)) as Box<dyn CompressionEngine>,
|
||||
CompressionType::ZStd => Box::new(crate::compression_engine::program::CompressionEngineProgram::new(
|
||||
"zstd",
|
||||
vec!["-c"],
|
||||
vec!["-d", "-c"]
|
||||
)) as Box<dyn CompressionEngine>,
|
||||
CompressionType::None => Box::new(crate::compression_engine::none::CompressionEngineNone::new()) as Box<dyn CompressionEngine>
|
||||
};
|
||||
|
||||
#[cfg(feature = "gzip")]
|
||||
{
|
||||
em[CompressionType::GZip] =
|
||||
Box::new(crate::compression_engine::gzip::CompressionEngineGZip::new())
|
||||
as Box<dyn CompressionEngine>;
|
||||
}
|
||||
|
||||
em
|
||||
};
|
||||
}
|
||||
|
||||
pub fn default_compression_type() -> CompressionType {
|
||||
CompressionType::LZ4
|
||||
}
|
||||
|
||||
pub fn get_compression_engine(ct: CompressionType) -> Result<Box<dyn CompressionEngine>> {
|
||||
let engine = &COMPRESSION_ENGINES[ct.clone()];
|
||||
if engine.is_supported() {
|
||||
Ok(engine.clone())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Compression engine for {} is not supported",
|
||||
ct.to_string()
|
||||
))
|
||||
}
|
||||
}
|
||||
@@ -16,9 +16,12 @@ impl CompressionEngineNone {
|
||||
}
|
||||
|
||||
impl CompressionEngine for CompressionEngineNone {
|
||||
fn size(&self, file_path: PathBuf) -> Result<usize> {
|
||||
let item_file_metadata = file_path.metadata()?;
|
||||
Ok(item_file_metadata.len() as usize)
|
||||
fn is_supported(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn get_status_info(&self) -> (String, String, String) {
|
||||
("<INTERNAL>".to_string(), "".to_string(), "".to_string())
|
||||
}
|
||||
|
||||
fn open(&self, file_path: PathBuf) -> Result<Box<dyn Read>> {
|
||||
@@ -27,7 +30,11 @@ impl CompressionEngine for CompressionEngineNone {
|
||||
}
|
||||
|
||||
fn create(&self, file_path: PathBuf) -> Result<Box<dyn Write>> {
|
||||
debug!("COMPRESSION: Writting to {:?} using {:?}", file_path, *self);
|
||||
debug!("COMPRESSION: Writing to {:?} using {:?}", file_path, *self);
|
||||
Ok(Box::new(File::create(file_path)?))
|
||||
}
|
||||
|
||||
fn clone_box(&self) -> Box<dyn CompressionEngine> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use log::*;
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::fs::File;
|
||||
use std::io::{Read, Write};
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::path::PathBuf;
|
||||
use std::process::{Child, Command, Stdio};
|
||||
use which::which;
|
||||
|
||||
use crate::compression_engine::CompressionEngine;
|
||||
|
||||
@@ -66,11 +64,12 @@ impl CompressionEngineProgram {
|
||||
compress: Vec<&str>,
|
||||
decompress: Vec<&str>,
|
||||
) -> CompressionEngineProgram {
|
||||
let program_path = get_program_path(program);
|
||||
let program_path = which(program);
|
||||
let supported = program_path.is_ok();
|
||||
|
||||
CompressionEngineProgram {
|
||||
program: program_path.unwrap_or(program.to_string()),
|
||||
program: program_path
|
||||
.map_or_else(|_| program.to_string(), |p| p.to_string_lossy().to_string()),
|
||||
compress: compress.iter().map(|s| s.to_string()).collect(),
|
||||
decompress: decompress.iter().map(|s| s.to_string()).collect(),
|
||||
supported,
|
||||
@@ -78,29 +77,23 @@ impl CompressionEngineProgram {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_program_path(program: &str) -> Result<String> {
|
||||
debug!("COMPRESSION: Looking for executable: {}", program);
|
||||
if let Ok(path) = env::var("PATH") {
|
||||
for p in path.split(':') {
|
||||
let p_str = format!("{}/{}", p, program);
|
||||
let stat = fs::metadata(p_str.clone());
|
||||
if let Ok(stat) = stat {
|
||||
let md = stat;
|
||||
let permissions = md.permissions();
|
||||
if md.is_file() && permissions.mode() & 0o111 != 0 {
|
||||
return Ok(p_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(anyhow!("Unable to find binary {} in PATH", program))
|
||||
}
|
||||
|
||||
impl CompressionEngine for CompressionEngineProgram {
|
||||
fn is_supported(&self) -> bool {
|
||||
self.supported
|
||||
}
|
||||
|
||||
fn is_internal(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn get_status_info(&self) -> (String, String, String) {
|
||||
(
|
||||
self.program.clone(),
|
||||
self.compress.join(" "),
|
||||
self.decompress.join(" "),
|
||||
)
|
||||
}
|
||||
|
||||
fn open(&self, file_path: PathBuf) -> Result<Box<dyn Read>> {
|
||||
debug!("COMPRESSION: Opening {:?} using {:?}", file_path, *self);
|
||||
|
||||
@@ -125,9 +118,10 @@ impl CompressionEngine for CompressionEngineProgram {
|
||||
args
|
||||
))?;
|
||||
|
||||
let stdout = process.stdout.take().ok_or_else(|| {
|
||||
anyhow!("Failed to capture stdout from child process")
|
||||
})?;
|
||||
let stdout = process
|
||||
.stdout
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("Failed to capture stdout from child process"))?;
|
||||
|
||||
Ok(Box::new(ProgramReader {
|
||||
process,
|
||||
@@ -159,13 +153,18 @@ impl CompressionEngine for CompressionEngineProgram {
|
||||
args
|
||||
))?;
|
||||
|
||||
let stdin = process.stdin.take().ok_or_else(|| {
|
||||
anyhow!("Failed to capture stdin from child process")
|
||||
})?;
|
||||
let stdin = process
|
||||
.stdin
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("Failed to capture stdin from child process"))?;
|
||||
|
||||
Ok(Box::new(ProgramWriter {
|
||||
process,
|
||||
stdin: Some(stdin),
|
||||
}))
|
||||
}
|
||||
|
||||
fn clone_box(&self) -> Box<dyn CompressionEngine> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
468
src/config.rs
Normal file
468
src/config.rs
Normal file
@@ -0,0 +1,468 @@
|
||||
use crate::args::Args;
|
||||
use anyhow::{Context, Result};
|
||||
use dirs;
|
||||
use log::{debug, error};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ColumnAlignment {
|
||||
#[default]
|
||||
Left,
|
||||
Right,
|
||||
Center,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ContentArrangement {
|
||||
#[default]
|
||||
Dynamic,
|
||||
DynamicFullWidth,
|
||||
Disabled,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum TableStyle {
|
||||
Ascii,
|
||||
Utf8,
|
||||
Utf8Full,
|
||||
#[default]
|
||||
Nothing,
|
||||
Custom(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum TableColor {
|
||||
Black,
|
||||
Red,
|
||||
Green,
|
||||
Yellow,
|
||||
Blue,
|
||||
Magenta,
|
||||
Cyan,
|
||||
White,
|
||||
Gray,
|
||||
DarkRed,
|
||||
DarkGreen,
|
||||
DarkYellow,
|
||||
DarkBlue,
|
||||
DarkMagenta,
|
||||
DarkCyan,
|
||||
Rgb(u8, u8, u8),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum TableAttribute {
|
||||
Bold,
|
||||
Dim,
|
||||
Italic,
|
||||
Underlined,
|
||||
SlowBlink,
|
||||
RapidBlink,
|
||||
Reverse,
|
||||
Hidden,
|
||||
CrossedOut,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct TableConfig {
|
||||
#[serde(default)]
|
||||
pub style: TableStyle,
|
||||
#[serde(default)]
|
||||
pub modifiers: Vec<String>,
|
||||
#[serde(default)]
|
||||
pub content_arrangement: ContentArrangement,
|
||||
#[serde(default)]
|
||||
pub truncation_indicator: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Default)]
|
||||
pub struct ColumnConfig {
|
||||
pub name: String,
|
||||
pub label: String,
|
||||
#[serde(default)]
|
||||
pub align: ColumnAlignment,
|
||||
#[serde(default)]
|
||||
pub max_len: Option<String>,
|
||||
#[serde(default)]
|
||||
pub fg_color: Option<TableColor>,
|
||||
#[serde(default)]
|
||||
pub bg_color: Option<TableColor>,
|
||||
#[serde(default)]
|
||||
pub attributes: Vec<TableAttribute>,
|
||||
#[serde(default)]
|
||||
pub padding: Option<(u16, u16)>,
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for ColumnConfig {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
#[derive(Deserialize)]
|
||||
struct Helper {
|
||||
name: String,
|
||||
label: Option<String>,
|
||||
#[serde(default)]
|
||||
align: ColumnAlignment,
|
||||
#[serde(default)]
|
||||
max_len: Option<String>,
|
||||
#[serde(default)]
|
||||
fg_color: Option<TableColor>,
|
||||
#[serde(default)]
|
||||
bg_color: Option<TableColor>,
|
||||
#[serde(default)]
|
||||
attributes: Vec<TableAttribute>,
|
||||
#[serde(default)]
|
||||
padding: Option<(u16, u16)>,
|
||||
}
|
||||
|
||||
let helper = Helper::deserialize(deserializer)?;
|
||||
let label = helper.label.unwrap_or_else(|| helper.name.clone());
|
||||
|
||||
Ok(ColumnConfig {
|
||||
name: helper.name,
|
||||
label,
|
||||
align: helper.align,
|
||||
max_len: helper.max_len,
|
||||
fg_color: helper.fg_color,
|
||||
bg_color: helper.bg_color,
|
||||
attributes: helper.attributes,
|
||||
padding: helper.padding,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct ServerConfig {
|
||||
pub address: Option<String>,
|
||||
pub port: Option<u16>,
|
||||
pub password_file: Option<PathBuf>,
|
||||
pub password: Option<String>,
|
||||
pub password_hash: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct CompressionPluginConfig {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
#[cfg_attr(feature = "server", derive(utoipa::ToSchema))]
|
||||
pub struct MetaPluginConfig {
|
||||
pub name: String,
|
||||
#[serde(default)]
|
||||
#[cfg_attr(feature = "server", schema(value_type = Object))]
|
||||
pub options: std::collections::HashMap<String, serde_yaml::Value>,
|
||||
#[serde(default)]
|
||||
#[cfg_attr(feature = "server", schema(value_type = Object))]
|
||||
pub outputs: std::collections::HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// Unified settings that merges config file and CLI arguments
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct Settings {
|
||||
#[serde(default)]
|
||||
pub dir: PathBuf,
|
||||
pub list_format: Vec<ColumnConfig>,
|
||||
#[serde(default)]
|
||||
pub table_config: TableConfig,
|
||||
#[serde(default)]
|
||||
pub human_readable: bool,
|
||||
pub output_format: Option<String>,
|
||||
#[serde(default)]
|
||||
pub quiet: bool,
|
||||
#[serde(default)]
|
||||
pub force: bool,
|
||||
pub server: Option<ServerConfig>,
|
||||
pub compression_plugin: Option<CompressionPluginConfig>,
|
||||
pub meta_plugins: Option<Vec<MetaPluginConfig>>,
|
||||
}
|
||||
|
||||
impl Settings {
|
||||
/// Create unified settings from config and args with proper priority
|
||||
pub fn new(args: &Args, default_dir: PathBuf) -> Result<Self> {
|
||||
debug!(
|
||||
"CONFIG: Creating settings with default dir: {:?}",
|
||||
default_dir
|
||||
);
|
||||
|
||||
let config_path = if let Some(config_path) = &args.options.config {
|
||||
config_path.clone()
|
||||
} else if let Ok(env_config) = std::env::var("KEEP_CONFIG") {
|
||||
PathBuf::from(env_config)
|
||||
} else {
|
||||
let default_path = if let Ok(home_dir) = std::env::var("HOME") {
|
||||
let mut path = PathBuf::from(home_dir);
|
||||
path.push(".config");
|
||||
path.push("keep");
|
||||
path.push("config.yml");
|
||||
path
|
||||
} else {
|
||||
PathBuf::from("~/.config/keep/config.yml")
|
||||
};
|
||||
debug!("CONFIG: Using default config path: {:?}", default_path);
|
||||
default_path
|
||||
};
|
||||
|
||||
debug!("CONFIG: Using config path: {:?}", config_path);
|
||||
|
||||
let mut config_builder = config::Config::builder();
|
||||
|
||||
// Load config file if it exists
|
||||
if config_path.exists() {
|
||||
debug!("CONFIG: Loading config file: {:?}", config_path);
|
||||
config_builder =
|
||||
config_builder.add_source(config::File::from(config_path.clone()).required(false));
|
||||
} else {
|
||||
debug!("CONFIG: Config file does not exist: {:?}", config_path);
|
||||
}
|
||||
|
||||
// Add environment variables
|
||||
debug!("CONFIG: Adding environment variables");
|
||||
let env_source = config::Environment::with_prefix("KEEP")
|
||||
.separator("__")
|
||||
.ignore_empty(true);
|
||||
config_builder = config_builder.add_source(env_source);
|
||||
|
||||
// Override with CLI args
|
||||
if let Some(dir) = &args.options.dir {
|
||||
debug!("CONFIG: Overriding dir with CLI arg: {:?}", dir);
|
||||
config_builder = config_builder.set_override("dir", dir.to_str().unwrap())?;
|
||||
}
|
||||
|
||||
if args.options.human_readable {
|
||||
config_builder = config_builder.set_override("human_readable", true)?;
|
||||
}
|
||||
|
||||
if let Some(output_format) = &args.options.output_format {
|
||||
config_builder =
|
||||
config_builder.set_override("output_format", output_format.as_str())?;
|
||||
}
|
||||
|
||||
if args.options.verbose > 0 {
|
||||
config_builder = config_builder.set_override("verbose", args.options.verbose)?;
|
||||
}
|
||||
|
||||
if args.options.quiet {
|
||||
config_builder = config_builder.set_override("quiet", true)?;
|
||||
}
|
||||
|
||||
if args.options.force {
|
||||
config_builder = config_builder.set_override("force", true)?;
|
||||
}
|
||||
|
||||
if let Some(server_password) = &args.options.server_password {
|
||||
config_builder =
|
||||
config_builder.set_override("server.password", server_password.as_str())?;
|
||||
}
|
||||
|
||||
if let Some(server_password_hash) = &args.options.server_password_hash {
|
||||
config_builder = config_builder
|
||||
.set_override("server.password_hash", server_password_hash.as_str())?;
|
||||
}
|
||||
|
||||
if let Some(server_address) = &args.mode.server_address {
|
||||
config_builder =
|
||||
config_builder.set_override("server.address", server_address.as_str())?;
|
||||
}
|
||||
|
||||
if let Some(server_port) = args.mode.server_port {
|
||||
config_builder = config_builder.set_override("server.port", server_port)?;
|
||||
}
|
||||
|
||||
if let Some(compression) = &args.item.compression {
|
||||
config_builder =
|
||||
config_builder.set_override("compression_plugin.name", compression.as_str())?;
|
||||
}
|
||||
|
||||
if !args.item.meta_plugins.is_empty() {
|
||||
let meta_plugins: Vec<std::collections::HashMap<String, String>> = args
|
||||
.item
|
||||
.meta_plugins
|
||||
.iter()
|
||||
.map(|name| {
|
||||
let mut map = std::collections::HashMap::new();
|
||||
map.insert("name".to_string(), name.clone());
|
||||
map
|
||||
})
|
||||
.collect();
|
||||
config_builder = config_builder.set_override("meta_plugins", meta_plugins)?;
|
||||
}
|
||||
|
||||
let config = config_builder.build()?;
|
||||
debug!("CONFIG: Built config, attempting to deserialize");
|
||||
|
||||
match config.try_deserialize::<Settings>() {
|
||||
Ok(mut settings) => {
|
||||
debug!("CONFIG: Successfully deserialized settings: {:?}", settings);
|
||||
|
||||
// Set defaults for list_format if not provided
|
||||
if settings.list_format.is_empty() {
|
||||
debug!("CONFIG: Setting default list_format");
|
||||
settings.list_format = vec![
|
||||
ColumnConfig {
|
||||
name: "id".to_string(),
|
||||
label: "Item".to_string(),
|
||||
align: ColumnAlignment::Right,
|
||||
max_len: None,
|
||||
fg_color: None,
|
||||
bg_color: None,
|
||||
attributes: Vec::new(),
|
||||
padding: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "time".to_string(),
|
||||
label: "Time".to_string(),
|
||||
align: ColumnAlignment::Right,
|
||||
max_len: None,
|
||||
fg_color: None,
|
||||
bg_color: None,
|
||||
attributes: Vec::new(),
|
||||
padding: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "size".to_string(),
|
||||
label: "Size".to_string(),
|
||||
align: ColumnAlignment::Right,
|
||||
max_len: None,
|
||||
fg_color: None,
|
||||
bg_color: None,
|
||||
attributes: Vec::new(),
|
||||
padding: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "meta:text_line_count".to_string(),
|
||||
label: "Lines".to_string(),
|
||||
align: ColumnAlignment::Right,
|
||||
max_len: None,
|
||||
fg_color: None,
|
||||
bg_color: None,
|
||||
attributes: Vec::new(),
|
||||
padding: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "tags".to_string(),
|
||||
label: "Tags".to_string(),
|
||||
align: ColumnAlignment::Left,
|
||||
max_len: None,
|
||||
fg_color: None,
|
||||
bg_color: None,
|
||||
attributes: Vec::new(),
|
||||
padding: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "meta:hostname_short".to_string(),
|
||||
label: "Host".to_string(),
|
||||
align: ColumnAlignment::Left,
|
||||
max_len: None,
|
||||
fg_color: None,
|
||||
bg_color: None,
|
||||
attributes: Vec::new(),
|
||||
padding: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "meta:command".to_string(),
|
||||
label: "Command".to_string(),
|
||||
align: ColumnAlignment::Left,
|
||||
max_len: None,
|
||||
fg_color: None,
|
||||
bg_color: None,
|
||||
attributes: Vec::new(),
|
||||
padding: None,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
// Set default meta_plugins to include 'env' if not provided
|
||||
if settings.meta_plugins.is_none() {
|
||||
debug!("CONFIG: Setting default meta_plugins to include 'env'");
|
||||
settings.meta_plugins = Some(vec![MetaPluginConfig {
|
||||
name: "env".to_string(),
|
||||
options: std::collections::HashMap::new(),
|
||||
outputs: std::collections::HashMap::new(),
|
||||
}]);
|
||||
}
|
||||
|
||||
// Set dir to default if not provided or is empty
|
||||
if settings.dir == PathBuf::new() {
|
||||
debug!("CONFIG: Setting default dir: {:?}", default_dir);
|
||||
settings.dir = default_dir;
|
||||
}
|
||||
|
||||
debug!("CONFIG: Final settings: {:?}", settings);
|
||||
Ok(settings)
|
||||
}
|
||||
Err(e) => {
|
||||
error!("CONFIG: Failed to deserialize settings: {}", e);
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_dir() -> anyhow::Result<PathBuf> {
|
||||
let mut path =
|
||||
dirs::home_dir().ok_or_else(|| anyhow::anyhow!("No home directory found"))?;
|
||||
path.push(".keep");
|
||||
if !path.exists() {
|
||||
std::fs::create_dir_all(&path)?;
|
||||
}
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
/// Get server password from password_file or directly from config if configured
|
||||
pub fn get_server_password(&self) -> Result<Option<String>> {
|
||||
if let Some(server) = &self.server {
|
||||
// First check for password_file
|
||||
if let Some(password_file) = &server.password_file {
|
||||
debug!("CONFIG: Reading password from file: {:?}", password_file);
|
||||
let password = fs::read_to_string(password_file)
|
||||
.with_context(|| format!("Failed to read password file: {:?}", password_file))?
|
||||
.trim()
|
||||
.to_string();
|
||||
return Ok(Some(password));
|
||||
}
|
||||
|
||||
// Fall back to direct password field
|
||||
if let Some(password) = &server.password {
|
||||
debug!("CONFIG: Using password from config");
|
||||
return Ok(Some(password.clone()));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
// Helper methods to access configuration values
|
||||
pub fn server_password(&self) -> Option<String> {
|
||||
self.get_server_password().ok().flatten()
|
||||
}
|
||||
|
||||
pub fn server_password_hash(&self) -> Option<String> {
|
||||
self.server.as_ref().and_then(|s| s.password_hash.clone())
|
||||
}
|
||||
|
||||
pub fn server_address(&self) -> Option<String> {
|
||||
self.server.as_ref().and_then(|s| s.address.clone())
|
||||
}
|
||||
|
||||
pub fn server_port(&self) -> Option<u16> {
|
||||
self.server.as_ref().and_then(|s| s.port)
|
||||
}
|
||||
|
||||
pub fn compression(&self) -> Option<String> {
|
||||
self.compression_plugin.as_ref().map(|c| c.name.clone())
|
||||
}
|
||||
|
||||
pub fn meta_plugins_names(&self) -> Vec<String> {
|
||||
self.meta_plugins
|
||||
.as_ref()
|
||||
.map(|plugins| plugins.iter().map(|p| p.name.clone()).collect())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
47
src/filter.pest
Normal file
47
src/filter.pest
Normal file
@@ -0,0 +1,47 @@
|
||||
// This Pest grammar defines the syntax for filter chains used in the Keep application.
// Filters can be chained with commas and may have named or unnamed options with JSON-like values.
//
// NOTE: Pest grammars use `//` for line comments — `#` is not comment syntax
// in Pest and makes the grammar fail to parse, so all comments use `//`.

WHITESPACE = _{ " " | "\t" | "\n" | "\r" }

// Top-level rule for parsing multiple filters separated by commas.
filters = { filter ~ ("," ~ filters)? }

// A single filter consisting of a name optionally followed by parenthesized options.
filter = { filter_name ~ ("(" ~ options ~ ")")? }

// The name of a filter, starting with an ASCII letter followed by alphanumeric characters or underscores.
filter_name = @{ ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "_")* }

// A list of comma-separated options within parentheses.
options = { option ~ ("," ~ options)? }

// A single option, optionally with a name followed by an equals sign and a value.
option = { (option_name ~ "=")? ~ option_value }

// The name of an option, starting with an ASCII letter followed by alphanumeric characters or underscores.
option_name = @{ ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "_")* }

// The value of an option, which can be a JSON number, string, or boolean.
option_value = {
    JSON_NUMBER |
    JSON_STRING |
    JSON_BOOLEAN
}

// JSON number format supporting integers, decimals, and scientific notation.
JSON_NUMBER = @{
    ("-")? ~
    ("0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT*) ~
    ("." ~ ASCII_DIGIT*)? ~
    (("e" | "E") ~ ("+" | "-")? ~ ASCII_DIGIT+)?
}

// JSON string format with escaped characters.
JSON_STRING = ${
    "\"" ~
    (("\\" ~ ANY) | (!("\"" | "\\") ~ ANY))* ~
    "\""
}

// JSON boolean values: true or false.
JSON_BOOLEAN = ${ "true" | "false" }
|
||||
131
src/filter_parser.rs
Normal file
131
src/filter_parser.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
use pest::Parser;
|
||||
use pest_derive::Parser;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Pest parser generated from `filter.pest`; parses filter-chain strings
/// such as `head_lines(10), grep(pattern="error")`.
#[derive(Parser)]
#[grammar = "filter.pest"]
pub struct FilterParser;
|
||||
|
||||
/// A single parsed filter invocation.
#[derive(Debug)]
pub struct Filter {
    /// Filter name, e.g. `"grep"` or `"head_lines"`.
    pub name: String,
    /// Option values keyed by option name; an unnamed option is keyed by the
    /// filter's own name (see `parse_filter_string`).
    pub options: HashMap<String, serde_json::Value>,
}
|
||||
|
||||
pub fn parse_filter_string(input: &str) -> Result<Vec<Filter>, Box<dyn std::error::Error>> {
|
||||
let mut filters = Vec::new();
|
||||
let pairs = FilterParser::parse(Rule::filters, input)?;
|
||||
|
||||
for pair in pairs {
|
||||
if pair.as_rule() == Rule::filter {
|
||||
let mut name = String::new();
|
||||
let mut options = HashMap::new();
|
||||
|
||||
for inner_pair in pair.into_inner() {
|
||||
match inner_pair.as_rule() {
|
||||
Rule::filter_name => {
|
||||
name = inner_pair.as_str().to_string();
|
||||
}
|
||||
Rule::options => {
|
||||
for option_pair in inner_pair.into_inner() {
|
||||
if option_pair.as_rule() == Rule::option {
|
||||
let mut option_name = None;
|
||||
let mut option_value = None;
|
||||
|
||||
for option_inner in option_pair.into_inner() {
|
||||
match option_inner.as_rule() {
|
||||
Rule::option_name => {
|
||||
option_name = Some(option_inner.as_str().to_string());
|
||||
}
|
||||
Rule::option_value => {
|
||||
option_value = Some(parse_option_value(option_inner.as_str())?);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(value) = option_value {
|
||||
// If no name is provided, use the filter name as the key
|
||||
let key = option_name.unwrap_or_else(|| name.clone());
|
||||
options.insert(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
filters.push(Filter { name, options });
|
||||
}
|
||||
}
|
||||
|
||||
Ok(filters)
|
||||
}
|
||||
|
||||
/// Converts a raw option token into a `serde_json::Value`.
///
/// Tries integer, then float, then boolean; anything else becomes a string,
/// with one surrounding layer of matching single or double quotes removed.
///
/// # Errors
///
/// Currently never fails; the `Result` signature is kept so future value
/// kinds can report conversion errors.
fn parse_option_value(input: &str) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
    // Try to parse as number
    if let Ok(num) = input.parse::<i64>() {
        return Ok(serde_json::Value::Number(num.into()));
    }
    if let Ok(num) = input.parse::<f64>() {
        if let Some(number) = serde_json::Number::from_f64(num) {
            return Ok(serde_json::Value::Number(number));
        }
    }

    // Try to parse as boolean
    if let Ok(boolean) = input.parse::<bool>() {
        return Ok(serde_json::Value::Bool(boolean));
    }

    // Treat as string. Only strip quotes when the token is long enough to
    // have distinct opening and closing quotes: a lone `"` or `'` previously
    // satisfied both starts_with and ends_with and `input[1..0]` panicked.
    let quoted = input.len() >= 2
        && ((input.starts_with('"') && input.ends_with('"'))
            || (input.starts_with('\'') && input.ends_with('\'')));
    let value = if quoted {
        input[1..input.len() - 1].to_string()
    } else {
        input.to_string()
    };

    Ok(serde_json::Value::String(value))
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // A bare filter name parses to a single Filter with no options.
    #[test]
    fn test_parse_simple_filter() {
        let result = parse_filter_string("grep").unwrap();
        assert_eq!(result.len(), 1);
        assert_eq!(result[0].name, "grep");
        assert!(result[0].options.is_empty());
    }

    // An unnamed option is keyed under the filter's own name.
    #[test]
    fn test_parse_filter_with_options() {
        let result = parse_filter_string("head_lines(10)").unwrap();
        assert_eq!(result.len(), 1);
        assert_eq!(result[0].name, "head_lines");
        assert_eq!(result[0].options["head_lines"], 10);
    }

    // A `name=value` option is keyed under the given name.
    #[test]
    fn test_parse_filter_with_named_options() {
        let result = parse_filter_string("grep(pattern=\"error\")").unwrap();
        assert_eq!(result.len(), 1);
        assert_eq!(result[0].name, "grep");
        assert_eq!(result[0].options["pattern"], "error");
    }

    // Comma-separated filters all parse, in order, with their own options.
    #[test]
    fn test_parse_multiple_filters() {
        let result = parse_filter_string("head_lines(10), grep(pattern=\"error\")").unwrap();
        assert_eq!(result.len(), 2);
        assert_eq!(result[0].name, "head_lines");
        assert_eq!(result[0].options["head_lines"], 10);
        assert_eq!(result[1].name, "grep");
        assert_eq!(result[1].options["pattern"], "error");
    }
}
|
||||
225
src/filter_plugin/exec.rs
Normal file
225
src/filter_plugin/exec.rs
Normal file
@@ -0,0 +1,225 @@
|
||||
use super::{FilterPlugin, FilterOption};
|
||||
use std::io::{Result, Read, Write};
|
||||
use std::process::{Command, Stdio, Child};
|
||||
use which::which;
|
||||
use log::*;
|
||||
|
||||
/// A filter that executes an external program and pipes input through it.
///
/// Spawns an external command, feeds the input stream to its stdin, and
/// copies the command's stdout to the output stream. Requires the program to
/// be resolvable on PATH (checked via `which` at construction time).
///
/// NOTE: `Clone` cannot be derived here — `Child`, `ChildStdin`, and
/// `ChildStdout` do not implement `Clone`, so the previous
/// `#[derive(Debug, Clone)]` failed to compile. Duplication is provided via
/// `FilterPlugin::clone_box` instead, which resets the process handles.
#[derive(Debug)]
pub struct ExecFilter {
    /// Resolved program path, or the raw name when PATH resolution failed.
    program: String,
    /// Arguments passed to the program.
    args: Vec<String>,
    /// True when the program was found on PATH.
    supported: bool,
    /// Whether the "command" option should be split on whitespace when parsed.
    split_whitespace: bool,
    // NOTE(review): the three handle fields below are never populated in this
    // file (always `None`); they look like leftovers from a streaming design —
    // confirm against other callers before removing.
    child_process: Option<Child>,
    stdin_writer: Option<std::process::ChildStdin>,
    stdout_reader: Option<std::process::ChildStdout>,
}
|
||||
|
||||
impl ExecFilter {
|
||||
/// Creates a new `ExecFilter` for the specified program and arguments.
|
||||
///
|
||||
/// Checks if the program is available using `which` and stores the resolved path.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `program` - The name or path of the program to execute.
|
||||
/// * `args` - A slice of string slices representing the arguments to pass to the program.
|
||||
/// * `split_whitespace` - Whether to split arguments on whitespace when parsing (unused in this context).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `ExecFilter` instance.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use keep::filter_plugin::exec::ExecFilter;
|
||||
///
|
||||
/// let filter = ExecFilter::new("grep", vec!["-i", "error"], false);
|
||||
/// assert!(filter.supported);
|
||||
/// ```
|
||||
pub fn new(
|
||||
program: &str,
|
||||
args: Vec<&str>,
|
||||
split_whitespace: bool,
|
||||
) -> ExecFilter {
|
||||
let program_path = which(program);
|
||||
let supported = program_path.is_ok();
|
||||
|
||||
ExecFilter {
|
||||
program: program_path.map_or_else(|| program.to_string(), |p| p.to_string_lossy().to_string()),
|
||||
args: args.iter().map(|s| s.to_string()).collect(),
|
||||
supported,
|
||||
split_whitespace,
|
||||
child_process: None,
|
||||
stdin_writer: None,
|
||||
stdout_reader: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FilterPlugin for ExecFilter {
|
||||
/// Filters the input by piping it through the external program and writing the output.
|
||||
///
|
||||
/// Spawns the process with piped I/O, uses threads for concurrent input/output
|
||||
/// copying, and waits for completion. Errors if the program isn't found or fails.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - A boxed mutable reference to the input reader providing the data stream to pipe to the program.
|
||||
/// * `writer` - A boxed mutable reference to the output writer where the program's output is sent.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns `Ok(())` on success, or an `io::Error` if process spawning, piping, or execution fails.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * NotFound - Program not available.
|
||||
/// * Other - Spawn, I/O, or wait failures.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use keep::filter_plugin::exec::ExecFilter;
|
||||
/// use std::io::{Read, Write};
|
||||
///
|
||||
/// let mut filter = ExecFilter::new("cat", vec![], false);
|
||||
/// // In filter context:
|
||||
/// filter.filter(Box::new(&mut input), Box::new(&mut output)).unwrap();
|
||||
/// ```
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
if !self.supported {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
format!("Program '{}' not found", self.program),
|
||||
));
|
||||
}
|
||||
|
||||
debug!("FILTER_EXEC: Executing command: {} {:?}", self.program, self.args);
|
||||
|
||||
// Read all input first
|
||||
let mut input_data = Vec::new();
|
||||
std::io::copy(reader, &mut input_data)?;
|
||||
|
||||
let mut child = Command::new(&self.program)
|
||||
.args(&self.args)
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!("Failed to spawn process '{}': {}", self.program, e),
|
||||
)
|
||||
})?;
|
||||
|
||||
let mut stdin = child.stdin.take().ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Failed to capture stdin from child process",
|
||||
)
|
||||
})?;
|
||||
|
||||
// Write input to child stdin
|
||||
stdin.write_all(&input_data)?;
|
||||
drop(stdin); // Close stdin to signal EOF
|
||||
|
||||
let mut stdout = child.stdout.take().ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Failed to capture stdout from child process",
|
||||
)
|
||||
})?;
|
||||
|
||||
// Copy stdout to writer
|
||||
std::io::copy(&mut stdout, writer)?;
|
||||
|
||||
// Wait for the child process to finish
|
||||
let output = child.wait_with_output()
|
||||
.map_err(|e| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!("Failed to wait on child process: {}", e),
|
||||
)
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
if !stderr.is_empty() {
|
||||
warn!("FILTER_EXEC: Process stderr: {}", stderr);
|
||||
}
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
format!("Process exited with error: {:?}", output.status),
|
||||
));
|
||||
}
|
||||
|
||||
debug!("FILTER_EXEC: Process completed successfully");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// Creates a new instance without active process handles.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` representing a clone of this filter.
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(ExecFilter {
|
||||
program: self.program.clone(),
|
||||
args: self.args.clone(),
|
||||
supported: self.supported,
|
||||
split_whitespace: self.split_whitespace,
|
||||
child_process: None,
|
||||
stdin_writer: None,
|
||||
stdout_reader: None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// Defines "command" as required and "split_whitespace" as optional.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of `FilterOption` describing the filter's configurable parameters.
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![
|
||||
FilterOption {
|
||||
name: "command".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
},
|
||||
FilterOption {
|
||||
name: "split_whitespace".to_string(),
|
||||
default: Some(serde_json::Value::Bool(true)),
|
||||
required: false,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_exec_filter() {
|
||||
crate::services::filter_service::register_filter_plugin("exec", || {
|
||||
// Create a dummy instance - actual creation happens in create method
|
||||
Box::new(ExecFilter {
|
||||
program: String::new(),
|
||||
args: Vec::new(),
|
||||
supported: false,
|
||||
split_whitespace: true,
|
||||
child_process: None,
|
||||
stdin_writer: None,
|
||||
stdout_reader: None,
|
||||
})
|
||||
});
|
||||
}
|
||||
123
src/filter_plugin/grep.rs
Normal file
123
src/filter_plugin/grep.rs
Normal file
@@ -0,0 +1,123 @@
|
||||
use super::{FilterOption, FilterPlugin};
|
||||
use regex::Regex;
|
||||
use std::io::{BufRead, Read, Result, Write};
|
||||
|
||||
/// A filter that matches lines against a regular expression pattern.
///
/// Outputs only lines that match the given regex, using `BufRead` for
/// line-by-line processing. Matching lines are re-terminated with `\n`:
/// `lines()` strips the original ending (including `\r\n`) and `writeln!`
/// appends `\n`, so original line endings are NOT preserved.
///
/// # Fields
///
/// * `regex` - Compiled regex for matching.
#[derive(Debug, Clone)]
pub struct GrepFilter {
    regex: Regex,
}
|
||||
|
||||
impl GrepFilter {
    /// Creates a new `GrepFilter` with the specified regex pattern.
    ///
    /// Compiles the pattern using the `regex` crate.
    ///
    /// # Arguments
    ///
    /// * `pattern` - The regular expression pattern (string) used to match lines.
    ///
    /// # Errors
    ///
    /// Returns `Err(io::Error)` of kind `InvalidInput` if pattern compilation
    /// fails (invalid regex).
    ///
    /// # Examples
    ///
    /// ```ignore
    /// let filter = GrepFilter::new("error|warn".to_string())?;
    /// ```
    pub fn new(pattern: String) -> Result<Self> {
        let regex = Regex::new(&pattern)
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e))?;
        Ok(Self { regex })
    }
}
|
||||
|
||||
/// Filters the input by writing only lines that match the regex pattern.
|
||||
///
|
||||
/// Reads lines from the input and writes matching lines to the output, preserving newlines.
|
||||
/// Uses BufReader for efficient line iteration.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - Mutable reference to the input data stream.
|
||||
/// * `writer` - Mutable reference to the output writer where matching lines are sent.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Ok(())` on success.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Propagates `io::Error` from BufRead lines() or writeln! (e.g., read/write failures, UTF-8 issues).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// filter.filter(&mut input, &mut output)?;
|
||||
/// ```
|
||||
impl FilterPlugin for GrepFilter {
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
let mut buf_reader = std::io::BufReader::new(reader);
|
||||
for line in buf_reader.by_ref().lines() {
|
||||
let line = line?;
|
||||
if self.regex.is_match(&line) {
|
||||
writeln!(writer, "{}", line)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// Creates a new GrepFilter with the same regex pattern.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` representing a clone of this filter.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let cloned = filter.clone_box();
|
||||
/// ```
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(Self {
|
||||
regex: self.regex.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// The only option is the required "pattern" for the regex.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector containing one `FilterOption` for "pattern" (required, no default).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let opts = filter.options();
|
||||
/// assert_eq!(opts.len(), 1);
|
||||
/// assert!(opts[0].required);
|
||||
/// ```
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![FilterOption {
|
||||
name: "pattern".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
}
|
||||
230
src/filter_plugin/head.rs
Normal file
230
src/filter_plugin/head.rs
Normal file
@@ -0,0 +1,230 @@
|
||||
use super::{FilterOption, FilterPlugin};
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::services::filter_service::register_filter_plugin;
|
||||
use std::io::{BufRead, Read, Result, Write};
|
||||
|
||||
/// A filter that passes through only the first N bytes of the input stream.
///
/// Output is capped at the configured byte count; useful for previewing
/// content without reading everything.
///
/// # Fields
///
/// * `remaining` - Bytes still allowed through before the filter stops.
pub struct HeadBytesFilter {
    remaining: usize,
}

impl HeadBytesFilter {
    /// Builds a filter that forwards at most `count` bytes.
    ///
    /// # Arguments
    ///
    /// * `count` - Upper bound on the number of bytes copied from the input.
    pub fn new(count: usize) -> Self {
        HeadBytesFilter { remaining: count }
    }
}
|
||||
|
||||
/// Filters input by reading only the first N bytes and writing them to the output.
|
||||
///
|
||||
/// Reads from the input in chunks until the byte limit is reached or EOF, then writes
|
||||
/// the collected bytes to the output. Stops early if the limit is zero.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - Mutable reference to the input data stream.
|
||||
/// * `writer` - Mutable reference to the output stream.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<()>` - Success if filtering completes, or I/O error.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `io::Error` from reading or writing operations.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// // Assuming a filter chain with head_bytes(5)
|
||||
/// // Input "Hello World" becomes "Hello"
|
||||
/// ```
|
||||
impl FilterPlugin for HeadBytesFilter {
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
if self.remaining == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut buffer = vec![0; PIPESIZE];
|
||||
while self.remaining > 0 {
|
||||
let to_read = std::cmp::min(self.remaining, PIPESIZE);
|
||||
let bytes_read = reader.read(&mut buffer[..to_read])?;
|
||||
if bytes_read == 0 {
|
||||
break;
|
||||
}
|
||||
writer.write_all(&buffer[..bytes_read])?;
|
||||
self.remaining -= bytes_read;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// Creates an independent copy with the same configuration.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` clone.
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(Self {
|
||||
remaining: self.remaining,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// Defines the "count" parameter as required with no default.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Vector of `FilterOption` describing parameters.
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![FilterOption {
|
||||
name: "count".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
/// A filter that passes through only the first N lines of the input stream.
///
/// Uses buffered line reading; each emitted line is terminated with `\n`.
///
/// # Fields
///
/// * `remaining` - Lines still allowed through before the filter stops.
pub struct HeadLinesFilter {
    remaining: usize,
}

impl HeadLinesFilter {
    /// Builds a filter that forwards at most `count` lines.
    ///
    /// # Arguments
    ///
    /// * `count` - Upper bound on the number of lines copied from the input.
    pub fn new(count: usize) -> Self {
        HeadLinesFilter { remaining: count }
    }
}
|
||||
|
||||
/// Filters input by reading only the first N lines and writing them to the output.
|
||||
///
|
||||
/// Uses buffered line reading to process input line-by-line until the limit or EOF.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - Mutable reference to the input data stream.
|
||||
/// * `writer` - Mutable reference to the output stream.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<()>` - Success if filtering completes, or I/O error.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `io::Error` from line reading or writing operations.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// // Assuming a filter chain with head_lines(2)
|
||||
/// // Input: "Line1\nLine2\nLine3" becomes "Line1\nLine2\n"
|
||||
/// ```
|
||||
impl FilterPlugin for HeadLinesFilter {
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
if self.remaining == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut buf_reader = std::io::BufReader::new(reader);
|
||||
for line in buf_reader.by_ref().lines() {
|
||||
let line = line?;
|
||||
writeln!(writer, "{}", line)?;
|
||||
self.remaining -= 1;
|
||||
if self.remaining == 0 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// Creates an independent copy with the same configuration.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` clone.
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(Self {
|
||||
remaining: self.remaining,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// Defines the "count" parameter as required with no default.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Vector of `FilterOption` describing parameters.
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![FilterOption {
|
||||
name: "count".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
// Register the plugin at module initialization time.
// NOTE(review): the factories use `count: 0` placeholders — with a zero
// count these filters emit nothing; presumably the filter service rebuilds
// them from the required "count" option before use. TODO confirm.
#[ctor::ctor]
fn register_head_filters() {
    register_filter_plugin("head_bytes", || Box::new(HeadBytesFilter::new(0)));
    register_filter_plugin("head_lines", || Box::new(HeadLinesFilter::new(0)));
}
|
||||
632
src/filter_plugin/mod.rs
Normal file
632
src/filter_plugin/mod.rs
Normal file
@@ -0,0 +1,632 @@
|
||||
use std::io::{Read, Result, Write};
|
||||
use std::str::FromStr;
|
||||
use strum::EnumString;
|
||||
|
||||
pub mod grep;
|
||||
/// Filter plugin module for processing input streams.
|
||||
///
|
||||
/// This module defines the `FilterPlugin` trait and `FilterChain` for chaining filters,
|
||||
/// along with parsing utilities for filter strings. Filters can process data like head/tail,
|
||||
/// grep, etc.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// Parse a filter string and apply to a reader:
|
||||
///
|
||||
/// ```
|
||||
/// let chain = parse_filter_string("head_lines(10)|grep(pattern=error)")?;
|
||||
/// chain.filter(&mut reader, &mut writer)?;
|
||||
/// ```
|
||||
pub mod head;
|
||||
pub mod skip;
|
||||
pub mod strip_ansi;
|
||||
pub mod tail;
|
||||
pub mod utils;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub use grep::GrepFilter;
|
||||
pub use head::{HeadBytesFilter, HeadLinesFilter};
|
||||
pub use skip::{SkipBytesFilter, SkipLinesFilter};
|
||||
pub use strip_ansi::StripAnsiFilter;
|
||||
pub use tail::{TailBytesFilter, TailLinesFilter};
|
||||
|
||||
/// Represents an option for a filter plugin.
///
/// Defines a configurable parameter for filters, with name, default, and
/// required flag.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[cfg_attr(feature = "server", derive(utoipa::ToSchema))]
pub struct FilterOption {
    /// Option name.
    pub name: String,
    /// Optional default value; `None` means no default.
    #[cfg_attr(feature = "server", schema(value_type = Option<Object>))]
    pub default: Option<serde_json::Value>,
    /// If true, the option must be provided by the user.
    pub required: bool,
}
|
||||
|
||||
/// Trait for filter plugins that process input streams.
///
/// Implement this trait to create a filter that reads from an input stream and writes filtered output.
///
/// # Methods
///
/// * `filter` - Process the stream (a pass-through default implementation is provided).
/// * `clone_box` - For cloning dynamic instances.
/// * `options` - Describe configurable options.
///
/// # Examples
///
/// ```
/// impl FilterPlugin for MyFilter {
///     fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
///         // Implementation
///     }
///     // ...
/// }
/// ```
pub trait FilterPlugin: Send {
    /// Processes the input stream and writes the filtered output.
    ///
    /// This method reads from the input reader and applies filtering logic,
    /// writing the processed data to the output writer. The default
    /// implementation is a plain pass-through via `std::io::copy`.
    ///
    /// # Arguments
    ///
    /// * `reader` - The input reader providing the data to filter.
    /// * `writer` - The output writer where the processed data is written.
    ///
    /// # Returns
    ///
    /// A `Result` indicating success (`Ok(())`) or failure with an `io::Error`.
    ///
    /// # Examples
    ///
    /// ```
    /// impl FilterPlugin for MyFilter {
    ///     fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
    ///         // Read and filter data
    ///         let mut buf = [0; 1024];
    ///         while let Ok(n) = reader.read(&mut buf) {
    ///             if n == 0 { break; }
    ///             // Apply filter logic to buf[0..n]
    ///             writer.write_all(&buf[0..n])?;
    ///         }
    ///         Ok(())
    ///     }
    ///     // ... other methods
    /// }
    /// ```
    fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
        let _ = std::io::copy(reader, writer)?;
        Ok(())
    }

    /// Clones this plugin into a new boxed instance.
    ///
    /// This method is required because `Clone` cannot be a supertrait of an
    /// object-safe trait; `impl Clone for Box<dyn FilterPlugin>` delegates here.
    ///
    /// # Returns
    ///
    /// A new `Box<dyn FilterPlugin>` clone of the current plugin.
    ///
    /// # Examples
    ///
    /// ```
    /// fn clone_box(&self) -> Box<dyn FilterPlugin> {
    ///     Box::new(self.clone())
    /// }
    /// ```
    fn clone_box(&self) -> Box<dyn FilterPlugin>;

    /// Returns the configuration options for this plugin.
    ///
    /// Describes the configurable parameters, including names, defaults, and required flags.
    /// The order matters: positional (unnamed) parameters in a filter string are
    /// matched against this list by index.
    ///
    /// # Returns
    ///
    /// A vector of `FilterOption` structs describing the plugin's options.
    ///
    /// # Examples
    ///
    /// ```
    /// fn options(&self) -> Vec<FilterOption> {
    ///     vec![
    ///         FilterOption {
    ///             name: "pattern".to_string(),
    ///             default: None,
    ///             required: true,
    ///         },
    ///     ]
    /// }
    /// ```
    fn options(&self) -> Vec<FilterOption>;
}
|
||||
|
||||
/// Enum representing the different types of filters.
|
||||
///
|
||||
/// Used for parsing and instantiating specific filter plugins.
|
||||
///
|
||||
/// # Variants
|
||||
///
|
||||
/// * `HeadBytes` - Head by bytes.
|
||||
/// * `HeadLines` - Head by lines.
|
||||
/// * ... etc.
|
||||
// Parsed from / rendered as snake_case, so filter strings use e.g. "head_bytes".
#[derive(Debug, EnumString, strum::VariantNames, strum::Display)]
#[strum(serialize_all = "snake_case")]
pub enum FilterType {
    /// Keep the first N bytes.
    HeadBytes,
    /// Keep the first N lines.
    HeadLines,
    /// Keep the last N bytes.
    TailBytes,
    /// Keep the last N lines.
    TailLines,
    /// Discard the first N bytes.
    SkipBytes,
    /// Discard the first N lines.
    SkipLines,
    /// Keep only lines matching a pattern.
    Grep,
    /// Remove ANSI escape sequences.
    StripAnsi,
}
|
||||
|
||||
/// A chain of filter plugins applied sequentially.
|
||||
///
|
||||
/// Chains multiple filters, applying them in order to the input stream.
|
||||
///
|
||||
/// # Fields
|
||||
///
|
||||
/// * `plugins` - Vector of boxed filter plugins.
|
||||
pub struct FilterChain {
    /// The plugins to run, in insertion order.
    plugins: Vec<Box<dyn FilterPlugin>>,
}
|
||||
|
||||
/// A chain of filter plugins applied sequentially.
|
||||
///
|
||||
/// Chains multiple filters, applying them in order to the input stream.
|
||||
///
|
||||
/// # Fields
|
||||
///
|
||||
/// * `plugins` - Vector of boxed filter plugins.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut chain = FilterChain::new();
|
||||
/// chain.add_plugin(Box::new(HeadLinesFilter::new(10)));
|
||||
/// chain.filter(&mut reader, &mut writer)?;
|
||||
/// ```
|
||||
impl Clone for FilterChain {
|
||||
/// Clones this filter chain.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `FilterChain` with cloned plugins.
|
||||
fn clone(&self) -> Self {
|
||||
let mut plugins = Vec::with_capacity(self.plugins.len());
|
||||
for plugin in &self.plugins {
|
||||
plugins.push(plugin.clone_box());
|
||||
}
|
||||
FilterChain { plugins }
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Box<dyn FilterPlugin> {
    /// Clones the boxed filter plugin.
    ///
    /// Delegates to `FilterPlugin::clone_box`, allowing containers of boxed
    /// plugins (such as `FilterChain`) to be cloned.
    ///
    /// # Returns
    ///
    /// A new boxed clone of the filter plugin.
    fn clone(&self) -> Self {
        self.clone_box()
    }
}
|
||||
|
||||
impl Default for FilterChain {
    /// Returns an empty chain, equivalent to [`FilterChain::new`].
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
impl FilterChain {
|
||||
/// Creates a new empty filter chain.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `FilterChain` with no plugins.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let chain = FilterChain::new();
|
||||
/// assert!(chain.plugins.is_empty());
|
||||
/// ```
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
plugins: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds a plugin to the chain.
|
||||
///
|
||||
/// Plugins are applied in the order they are added.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `plugin` - The boxed filter plugin to add to the chain.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut chain = FilterChain::new();
|
||||
/// chain.add_plugin(Box::new(GrepFilter::new("error".to_string())));
|
||||
/// ```
|
||||
pub fn add_plugin(&mut self, plugin: Box<dyn FilterPlugin>) {
|
||||
self.plugins.push(plugin);
|
||||
}
|
||||
|
||||
/// Applies the filter chain to the input and writes to the output.
|
||||
///
|
||||
/// If no plugins are present, data is copied directly from reader to writer.
|
||||
/// For multiple plugins, intermediate results are buffered.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - A mutable reference to the input reader providing the data stream.
|
||||
/// * `writer` - A mutable reference to the output writer where the fully filtered data is sent.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `Result` indicating success (`Ok(())`) or failure with an `io::Error` if any filter in the chain fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut chain = FilterChain::new();
|
||||
/// chain.add_plugin(Box::new(HeadBytesFilter::new(100)));
|
||||
/// chain.filter(&mut input_reader, &mut output_writer)?;
|
||||
/// ```
|
||||
pub fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
if self.plugins.is_empty() {
|
||||
// If no plugins, just copy the input to output
|
||||
std::io::copy(reader, writer)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// For multiple plugins, we need to chain them together
|
||||
// We'll use a temporary buffer to hold intermediate results
|
||||
let mut current_data = Vec::new();
|
||||
std::io::copy(reader, &mut current_data)?;
|
||||
|
||||
// Store the plugins length to avoid borrowing issues
|
||||
let plugins_len = self.plugins.len();
|
||||
|
||||
for i in 0..plugins_len {
|
||||
// Create a cursor for the current data
|
||||
let mut input = std::io::Cursor::new(std::mem::take(&mut current_data));
|
||||
|
||||
// For the last plugin, write directly to the output writer
|
||||
if i == plugins_len - 1 {
|
||||
self.plugins[i].filter(&mut input, writer)?;
|
||||
} else {
|
||||
// For intermediate plugins, write to a buffer
|
||||
let mut output_vec = Vec::new();
|
||||
self.plugins[i].filter(&mut input, &mut output_vec)?;
|
||||
current_data = output_vec;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a filter string into a `FilterChain`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `filter_str` - The filter string specifying the chain, e.g., "head_lines(10)|grep(pattern=error)".
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `Result` containing the parsed `FilterChain` on success, or an `io::Error` if the string is invalid.
|
||||
pub fn parse_filter_string(filter_str: &str) -> Result<FilterChain> {
    let mut chain = FilterChain::new();

    // Filters are separated by '|'; each part is either `name(params)` or a bare `name`.
    for part in filter_str.split('|') {
        let part = part.trim();
        if part.is_empty() {
            // Empty segments (e.g. "a||b" or a trailing '|') are silently skipped.
            continue;
        }

        // Parse the filter name and parameters
        if let Some((filter_name, params)) = part.split_once('(') {
            // NOTE: if the closing ')' is missing, this branch is skipped and the
            // part falls through to the "Unknown filter" error below.
            if let Some(params) = params.strip_suffix(')') {
                // Parse parameters
                let mut options = HashMap::new();
                let mut unnamed_params = Vec::new();

                // Split parameters by commas
                for param in params.split(',') {
                    let param = param.trim();
                    if param.is_empty() {
                        continue;
                    }

                    // Check if it's a named parameter (key=value)
                    if let Some((key, value)) = param.split_once('=') {
                        let key = key.trim();
                        let value = parse_option_value(value.trim())?;
                        options.insert(key.to_string(), value);
                    } else {
                        // Unnamed parameter; matched positionally against the
                        // plugin's declared options later.
                        let value = parse_option_value(param)?;
                        unnamed_params.push(value);
                    }
                }

                // Create the appropriate filter plugin
                if let Ok(filter_type) = FilterType::from_str(filter_name) {
                    let plugin =
                        create_filter_with_options(filter_type, &unnamed_params, &options)?;
                    chain.add_plugin(plugin);
                    continue;
                }
            }
        } else {
            // Handle filters without parameters
            if let Ok(filter_type) = FilterType::from_str(part) {
                match filter_type {
                    FilterType::StripAnsi => {
                        chain.add_plugin(Box::new(strip_ansi::StripAnsiFilter::new()));
                        continue;
                    }
                    _ => {
                        // All other filter types have a required option, so a
                        // bare name is an error.
                        return Err(std::io::Error::new(
                            std::io::ErrorKind::InvalidInput,
                            format!("Filter '{}' requires parameters", part),
                        ));
                    }
                }
            }
        }

        // If we get here, the filter wasn't recognized
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidInput,
            format!("Unknown filter: {}", part),
        ));
    }

    Ok(chain)
}
|
||||
|
||||
/// Creates a filter plugin with the given options.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `filter_type` - The enum variant indicating the type of filter to instantiate.
|
||||
/// * `unnamed_params` - A slice of unnamed JSON parameters passed to the filter.
|
||||
/// * `named_options` - A hashmap of named options as key-value pairs.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `Result` containing a boxed `FilterPlugin` on success, or an `io::Error` if creation fails.
|
||||
fn create_filter_with_options(
|
||||
filter_type: FilterType,
|
||||
unnamed_params: &[serde_json::Value],
|
||||
named_options: &HashMap<String, serde_json::Value>,
|
||||
) -> Result<Box<dyn FilterPlugin>> {
|
||||
// Get the default options for this filter type by creating a temporary instance
|
||||
// To do this, we need to create a default instance of the appropriate filter
|
||||
let option_defs = match filter_type {
|
||||
FilterType::Grep => grep::GrepFilter::new("".to_string())?.options(),
|
||||
FilterType::HeadBytes => head::HeadBytesFilter::new(0).options(),
|
||||
FilterType::HeadLines => head::HeadLinesFilter::new(0).options(),
|
||||
FilterType::TailBytes => tail::TailBytesFilter::new(0).options(),
|
||||
FilterType::TailLines => tail::TailLinesFilter::new(0).options(),
|
||||
FilterType::SkipBytes => skip::SkipBytesFilter::new(0).options(),
|
||||
FilterType::SkipLines => skip::SkipLinesFilter::new(0).options(),
|
||||
FilterType::StripAnsi => strip_ansi::StripAnsiFilter::new().options(),
|
||||
};
|
||||
|
||||
let mut options = HashMap::new();
|
||||
|
||||
// Process unnamed parameters
|
||||
if unnamed_params.len() > option_defs.len() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
format!(
|
||||
"Too many unnamed parameters (expected at most {})",
|
||||
option_defs.len()
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
for (i, param) in unnamed_params.iter().enumerate() {
|
||||
if i >= option_defs.len() {
|
||||
break;
|
||||
}
|
||||
let option_name = &option_defs[i].name;
|
||||
options.insert(option_name.clone(), param.clone());
|
||||
}
|
||||
|
||||
// Process named options
|
||||
for (key, value) in named_options {
|
||||
// Check if the option exists
|
||||
if !option_defs.iter().any(|opt| &opt.name == key) {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
format!("Unknown option '{}'", key),
|
||||
));
|
||||
}
|
||||
options.insert(key.clone(), value.clone());
|
||||
}
|
||||
|
||||
// Fill in defaults and check required options
|
||||
for opt_def in option_defs {
|
||||
if !options.contains_key(&opt_def.name) {
|
||||
if let Some(default) = &opt_def.default {
|
||||
options.insert(opt_def.name.clone(), default.clone());
|
||||
} else if opt_def.required {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
format!("Missing required option '{}'", opt_def.name),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create the specific filter with the processed options
|
||||
create_specific_filter(filter_type, &options)
|
||||
}
|
||||
|
||||
/// Creates a specific filter instance based on type and options.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `filter_type` - The enum variant indicating the type of filter to instantiate.
|
||||
/// * `options` - A reference to the hashmap of processed options for the filter.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `Result` containing a boxed `FilterPlugin` on success, or an `io::Error` if instantiation fails.
|
||||
fn create_specific_filter(
|
||||
filter_type: FilterType,
|
||||
options: &HashMap<String, serde_json::Value>,
|
||||
) -> Result<Box<dyn FilterPlugin>> {
|
||||
match filter_type {
|
||||
FilterType::Grep => {
|
||||
let pattern = options
|
||||
.get("pattern")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"grep filter requires 'pattern' parameter",
|
||||
)
|
||||
})?;
|
||||
grep::GrepFilter::new(pattern.to_string()).map(|f| Box::new(f) as Box<dyn FilterPlugin>)
|
||||
}
|
||||
FilterType::HeadBytes => {
|
||||
let count = options
|
||||
.get("count")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|n| n as usize)
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"head_bytes filter requires 'count' parameter",
|
||||
)
|
||||
})?;
|
||||
Ok(Box::new(head::HeadBytesFilter::new(count)))
|
||||
}
|
||||
FilterType::HeadLines => {
|
||||
let count = options
|
||||
.get("count")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|n| n as usize)
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"head_lines filter requires 'count' parameter",
|
||||
)
|
||||
})?;
|
||||
Ok(Box::new(head::HeadLinesFilter::new(count)))
|
||||
}
|
||||
FilterType::TailBytes => {
|
||||
let count = options
|
||||
.get("count")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|n| n as usize)
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"tail_bytes filter requires 'count' parameter",
|
||||
)
|
||||
})?;
|
||||
Ok(Box::new(tail::TailBytesFilter::new(count)))
|
||||
}
|
||||
FilterType::TailLines => {
|
||||
let count = options
|
||||
.get("count")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|n| n as usize)
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"tail_lines filter requires 'count' parameter",
|
||||
)
|
||||
})?;
|
||||
Ok(Box::new(tail::TailLinesFilter::new(count)))
|
||||
}
|
||||
FilterType::SkipBytes => {
|
||||
let count = options
|
||||
.get("count")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|n| n as usize)
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"skip_bytes filter requires 'count' parameter",
|
||||
)
|
||||
})?;
|
||||
Ok(Box::new(skip::SkipBytesFilter::new(count)))
|
||||
}
|
||||
FilterType::SkipLines => {
|
||||
let count = options
|
||||
.get("count")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|n| n as usize)
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"skip_lines filter requires 'count' parameter",
|
||||
)
|
||||
})?;
|
||||
Ok(Box::new(skip::SkipLinesFilter::new(count)))
|
||||
}
|
||||
FilterType::StripAnsi => {
|
||||
// StripAnsi doesn't take any parameters
|
||||
if !options.is_empty() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"strip_ansi filter doesn't take parameters",
|
||||
));
|
||||
}
|
||||
Ok(Box::new(strip_ansi::StripAnsiFilter::new()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses an option value from a string into a JSON value.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `input` - The input string.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `Result` containing the parsed JSON value.
|
||||
fn parse_option_value(input: &str) -> Result<serde_json::Value> {
|
||||
// Remove quotes if present
|
||||
let input = input.trim_matches(|c| c == '\'' || c == '"');
|
||||
|
||||
// Try to parse as number
|
||||
if let Ok(num) = input.parse::<i64>() {
|
||||
return Ok(serde_json::Value::Number(num.into()));
|
||||
}
|
||||
if let Ok(num) = input.parse::<f64>()
|
||||
&& let Some(number) = serde_json::Number::from_f64(num)
|
||||
{
|
||||
return Ok(serde_json::Value::Number(number));
|
||||
}
|
||||
|
||||
// Try to parse as boolean
|
||||
if input.eq_ignore_ascii_case("true") {
|
||||
return Ok(serde_json::Value::Bool(true));
|
||||
}
|
||||
if input.eq_ignore_ascii_case("false") {
|
||||
return Ok(serde_json::Value::Bool(false));
|
||||
}
|
||||
|
||||
// Treat as string
|
||||
Ok(serde_json::Value::String(input.to_string()))
|
||||
}
|
||||
147
src/filter_plugin/skip.rs
Normal file
147
src/filter_plugin/skip.rs
Normal file
@@ -0,0 +1,147 @@
|
||||
use super::{FilterOption, FilterPlugin};
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::services::filter_service::register_filter_plugin;
|
||||
use std::io::{BufRead, Read, Result, Write};
|
||||
|
||||
/// A filter that skips the first N bytes from the input stream.
|
||||
pub struct SkipBytesFilter {
    /// Bytes still to be discarded before pass-through begins.
    remaining: usize,
}

impl SkipBytesFilter {
    /// Creates a new `SkipBytesFilter` that will skip the specified number of bytes.
    ///
    /// # Arguments
    ///
    /// * `count` - The number of bytes to skip from the beginning of the input.
    pub fn new(count: usize) -> Self {
        Self { remaining: count }
    }
}
|
||||
|
||||
impl FilterPlugin for SkipBytesFilter {
|
||||
/// Filters the input by skipping the first N bytes and writing the rest to the output.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - Mutable reference to the input reader providing the data stream.
|
||||
/// * `writer` - Mutable reference to the output writer where filtered data is sent.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns `Ok(())` on success, or an `io::Error` if reading or writing fails.
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
// Skip bytes in chunks
|
||||
if self.remaining > 0 {
|
||||
let mut buffer = vec![0; PIPESIZE];
|
||||
while self.remaining > 0 {
|
||||
let to_read = std::cmp::min(self.remaining, PIPESIZE);
|
||||
let bytes_read = reader.read(&mut buffer[..to_read])?;
|
||||
if bytes_read == 0 {
|
||||
break;
|
||||
}
|
||||
self.remaining -= bytes_read;
|
||||
}
|
||||
}
|
||||
|
||||
// Copy the remaining data using io::copy for efficiency
|
||||
std::io::copy(reader, writer)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` representing a clone of this filter.
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(Self {
|
||||
remaining: self.remaining,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of `FilterOption` describing the filter's configurable parameters.
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![FilterOption {
|
||||
name: "count".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
/// A filter that skips the first N lines from the input stream.
|
||||
pub struct SkipLinesFilter {
    /// Lines still to be discarded before pass-through begins.
    remaining: usize,
}

impl SkipLinesFilter {
    /// Creates a new `SkipLinesFilter` that will skip the specified number of lines.
    ///
    /// # Arguments
    ///
    /// * `count` - The number of lines to skip from the beginning of the input.
    pub fn new(count: usize) -> Self {
        Self { remaining: count }
    }
}
|
||||
|
||||
impl FilterPlugin for SkipLinesFilter {
|
||||
/// Filters the input by skipping the first N lines and writing the rest to the output.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - Mutable reference to the input reader providing the data stream.
|
||||
/// * `writer` - Mutable reference to the output writer where filtered data is sent.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns `Ok(())` on success, or an `io::Error` if reading or writing fails.
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
let mut buf_reader = std::io::BufReader::new(reader);
|
||||
for line in buf_reader.by_ref().lines() {
|
||||
let line = line?;
|
||||
if self.remaining > 0 {
|
||||
self.remaining -= 1;
|
||||
} else {
|
||||
writeln!(writer, "{}", line)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` representing a clone of this filter.
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(Self {
|
||||
remaining: self.remaining,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of `FilterOption` describing the filter's configurable parameters.
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![FilterOption {
|
||||
name: "count".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
// Register the plugin at module initialization time
#[ctor::ctor]
fn register_skip_filters() {
    // NOTE(review): the factories use count 0 — presumably the registry only
    // needs instances for discovery/option listing, with real counts applied
    // when a filter string is parsed; confirm against filter_service.
    register_filter_plugin("skip_bytes", || Box::new(SkipBytesFilter::new(0)));
    register_filter_plugin("skip_lines", || Box::new(SkipLinesFilter::new(0)));
}
|
||||
59
src/filter_plugin/strip_ansi.rs
Normal file
59
src/filter_plugin/strip_ansi.rs
Normal file
@@ -0,0 +1,59 @@
|
||||
use super::{FilterOption, FilterPlugin};
|
||||
use std::io::{Read, Result, Write};
|
||||
use strip_ansi_escapes::Writer;
|
||||
|
||||
/// A filter that removes ANSI escape sequences from the input.
|
||||
///
|
||||
/// # Fields
|
||||
///
|
||||
/// None, stateless filter.
|
||||
#[derive(Default)]
pub struct StripAnsiFilter;

impl StripAnsiFilter {
    /// Creates a new `StripAnsiFilter`.
    ///
    /// The filter is stateless, so construction takes no arguments.
    ///
    /// # Returns
    ///
    /// A new instance of `StripAnsiFilter`.
    pub fn new() -> Self {
        Self
    }
}
|
||||
|
||||
impl FilterPlugin for StripAnsiFilter {
    /// Filters the input by stripping ANSI escape sequences and writing the plain text to the output.
    ///
    /// # Arguments
    ///
    /// * `reader` - Mutable reference to the input reader providing the data stream with potential ANSI codes.
    /// * `writer` - Mutable reference to the output writer where plain text is sent.
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` on success, or an `io::Error` if reading or writing fails.
    fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
        // The strip_ansi_escapes Writer removes escape sequences as bytes pass through.
        let mut ansi_writer = Writer::new(writer);
        std::io::copy(reader, &mut ansi_writer)?;
        // Explicit flush so any buffered trailing bytes reach the underlying writer
        // before the wrapper is dropped.
        ansi_writer.flush()?;
        Ok(())
    }

    /// Clones this filter into a new boxed instance.
    ///
    /// # Returns
    ///
    /// A new `Box<dyn FilterPlugin>` representing a clone of this filter.
    fn clone_box(&self) -> Box<dyn FilterPlugin> {
        Box::new(Self)
    }

    /// Returns the configuration options for this filter (none required).
    ///
    /// # Returns
    ///
    /// An empty vector since this filter has no configurable options.
    fn options(&self) -> Vec<FilterOption> {
        Vec::new() // strip_ansi doesn't take any options
    }
}
|
||||
166
src/filter_plugin/tail.rs
Normal file
166
src/filter_plugin/tail.rs
Normal file
@@ -0,0 +1,166 @@
|
||||
use super::{FilterOption, FilterPlugin};
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::services::filter_service::register_filter_plugin;
|
||||
use std::collections::VecDeque;
|
||||
use std::io::{BufRead, Read, Result, Write};
|
||||
|
||||
/// A filter that reads the last N bytes from the input stream.
|
||||
pub struct TailBytesFilter {
    /// Sliding window holding the most recent bytes (at most `count` of them).
    buffer: VecDeque<u8>,
    /// Number of trailing bytes to retain.
    count: usize,
}

impl TailBytesFilter {
    /// Creates a new `TailBytesFilter` that will keep the last specified number of bytes.
    ///
    /// # Arguments
    ///
    /// * `count` - The number of bytes to retain from the end of the input.
    pub fn new(count: usize) -> Self {
        Self {
            buffer: VecDeque::with_capacity(count),
            count,
        }
    }
}
|
||||
|
||||
impl FilterPlugin for TailBytesFilter {
|
||||
/// Filters the input by keeping only the last N bytes and writing them to the output.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - Mutable reference to the input reader providing the data stream.
|
||||
/// * `writer` - Mutable reference to the output writer where filtered data is sent.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns `Ok(())` on success, or an `io::Error` if reading or writing fails.
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
let mut temp_buffer = vec![0; PIPESIZE];
|
||||
loop {
|
||||
let bytes_read = reader.read(&mut temp_buffer)?;
|
||||
if bytes_read == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
// Add new data to the buffer
|
||||
for &byte in &temp_buffer[..bytes_read] {
|
||||
if self.buffer.len() == self.count {
|
||||
self.buffer.pop_front();
|
||||
}
|
||||
self.buffer.push_back(byte);
|
||||
}
|
||||
}
|
||||
|
||||
// Write the buffered data at the end
|
||||
let result: Vec<u8> = self.buffer.iter().cloned().collect();
|
||||
writer.write_all(&result)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` representing a clone of this filter.
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(Self {
|
||||
buffer: self.buffer.clone(),
|
||||
count: self.count,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of `FilterOption` describing the filter's configurable parameters.
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![FilterOption {
|
||||
name: "count".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
/// A filter that reads the last N lines from the input stream.
|
||||
pub struct TailLinesFilter {
    /// Sliding window holding the most recent lines (at most `count` of them).
    lines: VecDeque<String>,
    /// Number of trailing lines to retain.
    count: usize,
}

impl TailLinesFilter {
    /// Creates a new `TailLinesFilter` that will keep the last specified number of lines.
    ///
    /// # Arguments
    ///
    /// * `count` - The number of lines to retain from the end of the input.
    pub fn new(count: usize) -> Self {
        Self {
            lines: VecDeque::with_capacity(count),
            count,
        }
    }
}
|
||||
|
||||
impl FilterPlugin for TailLinesFilter {
|
||||
/// Filters the input by keeping only the last N lines and writing them to the output.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `reader` - Mutable reference to the input reader providing the data stream.
|
||||
/// * `writer` - Mutable reference to the output writer where filtered data is sent.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns `Ok(())` on success, or an `io::Error` if reading or writing fails.
|
||||
fn filter(&mut self, reader: &mut dyn Read, writer: &mut dyn Write) -> Result<()> {
|
||||
let mut buf_reader = std::io::BufReader::new(reader);
|
||||
for line in buf_reader.by_ref().lines() {
|
||||
let line = line?;
|
||||
if self.lines.len() == self.count {
|
||||
self.lines.pop_front();
|
||||
}
|
||||
self.lines.push_back(line);
|
||||
}
|
||||
|
||||
// Write the buffered lines
|
||||
for line in &self.lines {
|
||||
writeln!(writer, "{}", line)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clones this filter into a new boxed instance.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `Box<dyn FilterPlugin>` representing a clone of this filter.
|
||||
fn clone_box(&self) -> Box<dyn FilterPlugin> {
|
||||
Box::new(Self {
|
||||
lines: self.lines.clone(),
|
||||
count: self.count,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the configuration options for this filter.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of `FilterOption` describing the filter's configurable parameters.
|
||||
fn options(&self) -> Vec<FilterOption> {
|
||||
vec![FilterOption {
|
||||
name: "count".to_string(),
|
||||
default: None,
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
// Register the plugin at module initialization time
#[ctor::ctor]
fn register_tail_filters() {
    // NOTE(review): the factories use count 0 — presumably the registry only
    // needs instances for discovery/option listing, with real counts applied
    // when a filter string is parsed; confirm against filter_service.
    register_filter_plugin("tail_bytes", || Box::new(TailBytesFilter::new(0)));
    register_filter_plugin("tail_lines", || Box::new(TailLinesFilter::new(0)));
}
|
||||
33
src/filter_plugin/utils.rs
Normal file
33
src/filter_plugin/utils.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use std::io::Result;
|
||||
|
||||
/// Creates a filter chain from a filter string specification.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `filter_str` - The string describing the filter chain, such as "head_lines(10)|grep(pattern=error)"
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<Option<super::FilterChain>>` - A result containing:
|
||||
/// * `Ok(Some(FilterChain))` if parsing succeeds
|
||||
/// * `Ok(None)` if the filter string is empty
|
||||
/// * `Err(io::Error)` if the string is invalid
|
||||
pub fn create_filter_chain(filter_str: &str) -> Result<Option<super::FilterChain>> {
|
||||
super::parse_filter_string(filter_str).map(Some)
|
||||
}
|
||||
|
||||
/// Parses a string into a number of type T.
///
/// # Arguments
///
/// * `s` - The string to parse into a number
///
/// # Returns
///
/// * `Result<T>` - A result containing:
///   * `Ok(T)` - The parsed number on success
///   * `Err(io::Error)` - If the string is not a valid number
pub fn parse_number<T: std::str::FromStr>(s: &str) -> Result<T> {
    // Include the offending input in the error text so callers (and
    // users reading CLI error output) can see exactly what failed.
    s.parse::<T>().map_err(|_| {
        std::io::Error::new(
            std::io::ErrorKind::InvalidInput,
            format!("Invalid number: {:?}", s),
        )
    })
}
|
||||
75
src/lib.rs
Normal file
75
src/lib.rs
Normal file
@@ -0,0 +1,75 @@
|
||||
#![deny(clippy::all)]
|
||||
#![deny(unsafe_code)]
|
||||
#![allow(unused_imports)]
|
||||
|
||||
//! Keep library for managing temporary files with compression and metadata.
|
||||
//!
|
||||
//! This library provides core functionality for the Keep application, including
|
||||
//! database operations, compression engines, item services, and plugin systems
|
||||
//! for metadata and filtering. It supports CLI modes, server APIs, and plugin
|
||||
//! registration via ctors.
|
||||
//!
|
||||
//! # Usage
|
||||
//!
|
||||
//! Add to Cargo.toml and use re-exported types:
|
||||
//! ```toml
|
||||
//! [dependencies]
|
||||
//! keep = "0.1"
|
||||
//! ```
|
||||
//!
|
||||
//! ```rust
|
||||
//! use keep::Args;
|
||||
//! let args = Args::parse();
|
||||
//! ```
|
||||
//!
|
||||
//! # Features
|
||||
//!
|
||||
//! - `server`: Enables Axum-based HTTP server.
|
||||
//! - `gzip`, `lz4`: Built-in compression support.
|
||||
//! - `magic`: File type detection via libmagic.
|
||||
|
||||
// Re-export modules for testing
|
||||
pub mod args;
|
||||
pub mod common;
|
||||
pub mod compression_engine;
|
||||
pub mod config;
|
||||
pub mod db;
|
||||
pub mod filter_plugin;
|
||||
pub mod meta_plugin;
|
||||
pub mod modes;
|
||||
pub mod services;
|
||||
|
||||
// Re-export Args struct for library usage
|
||||
pub use args::Args;
|
||||
// Re-export PIPESIZE constant
|
||||
pub use common::PIPESIZE;
|
||||
|
||||
// Import all filter plugins to ensure they register themselves
|
||||
#[allow(unused_imports)]
|
||||
use filter_plugin::{grep, head, skip, strip_ansi, tail};
|
||||
|
||||
use crate::meta_plugin::{
|
||||
cwd, digest, env, exec, hostname, keep_pid, read_rate, read_time, shell, shell_pid, user,
|
||||
};
|
||||
|
||||
#[cfg(feature = "magic")]
|
||||
#[allow(unused_imports)]
|
||||
use crate::meta_plugin::magic_file;
|
||||
|
||||
/// Ensures plugin registration has completed before use.
///
/// Filter and meta plugins register themselves through `ctor`-based
/// constructors at load time, so calling this is normally unnecessary;
/// it exists as an explicit hook for applications wanting a
/// deterministic initialization point early in startup.
///
/// # Examples
///
/// ```
/// keep::init_plugins();
/// ```
pub fn init_plugins() {
    // Intentionally empty: ctor-registered plugins are already installed
    // by the time any library code runs. Explicit registration may be
    // moved here in a later step.
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
401
src/main.rs
401
src/main.rs
@@ -1,218 +1,14 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::{Context, Error, Result, anyhow};
|
||||
use clap::*;
|
||||
use clap::error::ErrorKind;
|
||||
use clap::*;
|
||||
use log::*;
|
||||
mod modes;
|
||||
|
||||
extern crate directories;
|
||||
use directories::ProjectDirs;
|
||||
|
||||
extern crate prettytable;
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
extern crate lazy_static;
|
||||
|
||||
pub mod compression_engine;
|
||||
pub mod db;
|
||||
pub mod plugins;
|
||||
pub mod meta_plugin;
|
||||
//pub mod item;
|
||||
|
||||
extern crate term;
|
||||
extern crate serde_json;
|
||||
extern crate serde_yaml;
|
||||
extern crate serde;
|
||||
|
||||
mod common;
|
||||
|
||||
/**
 * Main struct for command-line arguments.
 *
 * Aggregates the three argument groups (mode selection, item options,
 * general options) plus the trailing positional list. Parsed by clap
 * via `#[derive(Parser)]`.
 */
#[derive(Parser, Debug, Clone)]
#[command(author, version, about, long_about = None)]
pub struct Args {
    // Mode flags (--save, --get, --list, ...); mutually exclusive via
    // the clap "mode" group / conflicts declared on ModeArgs.
    #[command(flatten)]
    mode: ModeArgs,
    // Per-item settings: metadata pairs, digest/compression algorithm,
    // and the meta plugins to run on save.
    #[command(flatten)]
    item: ItemArgs,
    // General switches: storage dir, verbosity, output format, etc.
    #[command(flatten)]
    options: OptionsArgs,

    // Positional arguments; each parses as a numeric ID when possible,
    // otherwise it is treated as a tag (see NumberOrString::from_str).
    #[arg(help("A list of either item IDs or tags"))]
    ids_or_tags: Vec<NumberOrString>,
}
|
||||
|
||||
/**
|
||||
* Struct for mode-specific arguments.
|
||||
*/
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
struct ModeArgs {
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["get", "diff", "list", "update", "delete", "info", "status"]))]
|
||||
#[arg(help("Save an item using any tags or metadata provided"))]
|
||||
save: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "diff", "list", "update", "delete", "info", "status"]))]
|
||||
#[arg(help(
|
||||
"Get an item either by it's ID or by a combination of matching tags and metatdata"
|
||||
))]
|
||||
get: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), long, conflicts_with_all(["save", "get", "list", "update", "delete", "info", "status"]))]
|
||||
#[arg(help("Show a diff between two items by ID"))]
|
||||
diff: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "get", "diff", "update", "delete", "info", "status"]))]
|
||||
#[arg(help("List items, filtering on tags or metadata if given"))]
|
||||
list: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "get", "diff", "list", "delete", "info", "status"]), requires("ids_or_tags"))]
|
||||
#[arg(help("Update a specified item ID's tags and/or metadata"))]
|
||||
update: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "get", "diff", "list", "update", "info", "status"]), requires("ids_or_tags"))]
|
||||
#[arg(help("Delete items either by ID or by matching tags"))]
|
||||
delete: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short, long, conflicts_with_all(["save", "get", "diff", "list", "update", "delete", "status"]), requires("ids_or_tags"))]
|
||||
#[arg(help(
|
||||
"Get an item either by it's ID or by a combination of matching tags and metatdata"
|
||||
))]
|
||||
info: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), short('S'), long, conflicts_with_all(["save", "get", "diff", "list", "update", "delete", "info", "server"]))]
|
||||
#[arg(help("Show status of directories and supported compression algorithms"))]
|
||||
status: bool,
|
||||
|
||||
#[arg(group("mode"), help_heading("Mode Options"), long, conflicts_with_all(["save", "get", "diff", "list", "update", "delete", "info", "status"]))]
|
||||
#[arg(help("Start REST HTTP server on specified address:port or socket path"))]
|
||||
server: Option<String>,
|
||||
}
|
||||
|
||||
/**
 * Struct for item-specific arguments.
 *
 * Options describing how a single item is stored: attached metadata,
 * digest and compression algorithm choices, and which meta plugins run
 * when saving.
 */
#[derive(Parser, Debug, Clone)]
struct ItemArgs {
    // KEY=VALUE pairs; per the help text, a KEY with no VALUE removes
    // that metadata entry.
    #[arg(help_heading("Item Options"), short, long, conflicts_with_all(["get", "delete", "status"]))]
    #[arg(help(
        "Set metadata for the item using the format KEY=[VALUE], the metadata will be removed if VALUE is not provided"
    ))]
    meta: Vec<KeyValue>,

    // Also settable via the KEEP_DIGEST environment variable.
    #[arg(help_heading("Item Options"), long, env("KEEP_DIGEST"))]
    #[arg(help("Digest algorithm to use when saving items"))]
    digest: Option<String>,

    // Also settable via KEEP_COMPRESSION.
    #[arg(help_heading("Item Options"), short, long, env("KEEP_COMPRESSION"))]
    #[arg(help("Compression algorithm to use when saving items"))]
    compression: Option<String>,

    // 'M' because lowercase -m is taken by --meta.
    #[arg(help_heading("Item Options"), short('M'), long, env("KEEP_META_PLUGINS"))]
    #[arg(help("Meta plugins to use when saving items"))]
    meta_plugins: Vec<String>,
}
|
||||
|
||||
|
||||
/**
 * Struct for general options.
 *
 * Cross-cutting switches: storage directory, list formatting, logging
 * verbosity, output format, and server authentication.
 */
#[derive(Parser, Debug, Default, Clone)]
struct OptionsArgs {
    // Storage root; also settable via KEEP_DIR. When absent, main()
    // falls back to the platform data directory.
    #[arg(long, env("KEEP_DIR"))]
    #[arg(help("Specify the directory to use for storage"))]
    dir: Option<PathBuf>,

    // Columns for --list; the default includes a "meta:hostname" entry,
    // suggesting "meta:NAME" selects a metadata field — see list mode.
    #[arg(
        long,
        env("KEEP_LIST_FORMAT"),
        default_value("id,time,size,tags,meta:hostname")
    )]
    #[arg(help("A comma separated list of columns to display with --list"))]
    list_format: String,

    // 'H' to avoid clashing with clap's -h/--help.
    #[arg(short('H'), long)]
    #[arg(help("Display file sizes with units"))]
    human_readable: bool,

    // -v, -vv, ... — each occurrence raises the log level; mutually
    // exclusive with --quiet.
    #[arg(short, long, action = clap::ArgAction::Count, conflicts_with("quiet"))]
    #[arg(help("Increase message verbosity, can be given more than once"))]
    verbose: u8,

    #[arg(short, long)]
    #[arg(help("Do not show any messages"))]
    quiet: bool,

    // Validated in main(): non-"table" values are rejected outside the
    // --info/--status/--list modes.
    #[arg(long, value_enum, default_value("table"))]
    #[arg(help("Output format (only works with --info, --status, --list)"))]
    output_format: Option<String>,

    // Validated in main(): only allowed together with --server.
    #[arg(long, env("KEEP_SERVER_PASSWORD"))]
    #[arg(help("Password for server authentication (requires --server)"))]
    server_password: Option<String>,

    #[arg(long, help("Force output even when binary data would be sent to a TTY"))]
    force: bool,
}
|
||||
|
||||
/**
 * Enum representing the different modes of operation.
 *
 * Derived in main() from the mutually exclusive mode flags; `Unknown`
 * is the initial value before any flag has been matched.
 */
#[derive(Debug, PartialEq)]
enum KeepModes {
    Unknown, // no mode flag matched (yet)
    Save,    // --save
    Get,     // --get
    Diff,    // --diff
    List,    // --list
    Update,  // --update
    Delete,  // --delete
    Info,    // --info
    Status,  // --status
    Server,  // --server <addr>
}
|
||||
|
||||
/**
|
||||
* Struct for key-value pairs.
|
||||
*/
|
||||
#[derive(Debug, Clone)]
|
||||
struct KeyValue {
|
||||
key: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
impl FromStr for KeyValue {
|
||||
type Err = Error;
|
||||
fn from_str(s: &str) -> Result<Self, Error> {
|
||||
match s.split_once('=') {
|
||||
Some(kv) => Ok(KeyValue {
|
||||
key: kv.0.to_string(),
|
||||
value: kv.1.to_string(),
|
||||
}),
|
||||
None => Err(anyhow!("Unable to parse key=value pair")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Enum for representing either a number or a string.
|
||||
*/
|
||||
#[derive(Debug, Clone)]
|
||||
enum NumberOrString {
|
||||
Number(i64),
|
||||
Str(String),
|
||||
}
|
||||
|
||||
impl FromStr for NumberOrString {
|
||||
type Err = Error;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Ok(s.parse::<i64>()
|
||||
.map(NumberOrString::Number)
|
||||
.unwrap_or_else(|_| NumberOrString::Str(s.to_string())))
|
||||
}
|
||||
}
|
||||
use keep::args::{Args, NumberOrString};
|
||||
use keep::config::Settings;
|
||||
use keep::db;
|
||||
use keep::modes;
|
||||
|
||||
/**
|
||||
* Main function to handle command-line arguments and execute the appropriate mode.
|
||||
@@ -222,7 +18,12 @@ fn main() -> Result<(), Error> {
|
||||
let proj_dirs = ProjectDirs::from("gt0.ca", "Andrew Phillips", "Keep");
|
||||
|
||||
let mut cmd = Args::command();
|
||||
let mut args = Args::parse();
|
||||
let args = Args::parse();
|
||||
|
||||
// Validate arguments based on mode
|
||||
if let Err(e) = args.validate() {
|
||||
cmd.error(ErrorKind::ValueValidation, e).exit();
|
||||
}
|
||||
|
||||
stderrlog::new()
|
||||
.module(module_path!())
|
||||
@@ -234,19 +35,69 @@ fn main() -> Result<(), Error> {
|
||||
|
||||
debug!("MAIN: Start");
|
||||
|
||||
// Determine default data directory
|
||||
let default_dir = match proj_dirs {
|
||||
Some(ref proj_dirs) => proj_dirs.data_dir().to_path_buf(),
|
||||
None => return Err(anyhow!("Unable to determine data directory")),
|
||||
};
|
||||
|
||||
// Create unified settings using the new config system
|
||||
let settings = Settings::new(&args, default_dir)?;
|
||||
|
||||
debug!("MAIN: Loaded settings: {:?}", settings);
|
||||
|
||||
let ids = &mut Vec::new();
|
||||
let tags = &mut Vec::new();
|
||||
|
||||
// For --info and --get modes, treat numeric strings as IDs
|
||||
for v in args.ids_or_tags.iter() {
|
||||
debug!("MAIN: Parsed value: {:?}", v);
|
||||
match v.clone() {
|
||||
NumberOrString::Number(num) => ids.push(num),
|
||||
NumberOrString::Str(str) => tags.push(str),
|
||||
NumberOrString::Number(num) => {
|
||||
debug!("MAIN: Adding to ids: {}", num);
|
||||
ids.push(num)
|
||||
}
|
||||
NumberOrString::Str(str) => {
|
||||
// For --info and --get, try to parse strings as numbers to treat them as IDs
|
||||
if args.mode.info || args.mode.get {
|
||||
if let Ok(num) = str.parse::<i64>() {
|
||||
debug!("MAIN: Adding parsed string to ids: {}", num);
|
||||
ids.push(num);
|
||||
continue;
|
||||
} else if args.mode.info {
|
||||
// --info only accepts numeric IDs
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
format!("--info requires numeric IDs, found: '{}'", str),
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
}
|
||||
// If not a number, or not using --info/--get, treat as tag
|
||||
debug!("MAIN: Adding to tags: {}", str);
|
||||
tags.push(str)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tags.sort();
|
||||
tags.dedup();
|
||||
|
||||
/// Internal enum representing the parsed execution mode.
|
||||
#[derive(PartialEq, Debug)]
|
||||
enum KeepModes {
|
||||
Unknown,
|
||||
Save,
|
||||
Get,
|
||||
Diff,
|
||||
List,
|
||||
Delete,
|
||||
Info,
|
||||
Status,
|
||||
StatusPlugins,
|
||||
Server,
|
||||
GenerateConfig,
|
||||
}
|
||||
|
||||
let mut mode: KeepModes = KeepModes::Unknown;
|
||||
|
||||
if args.mode.save {
|
||||
@@ -259,14 +110,16 @@ fn main() -> Result<(), Error> {
|
||||
mode = KeepModes::List;
|
||||
} else if args.mode.delete {
|
||||
mode = KeepModes::Delete;
|
||||
} else if args.mode.update {
|
||||
mode = KeepModes::Update;
|
||||
} else if args.mode.info {
|
||||
mode = KeepModes::Info;
|
||||
} else if args.mode.status {
|
||||
mode = KeepModes::Status;
|
||||
} else if args.mode.server.is_some() {
|
||||
} else if args.mode.status_plugins {
|
||||
mode = KeepModes::StatusPlugins;
|
||||
} else if args.mode.server {
|
||||
mode = KeepModes::Server;
|
||||
} else if args.mode.generate_config {
|
||||
mode = KeepModes::GenerateConfig;
|
||||
}
|
||||
|
||||
if mode == KeepModes::Unknown {
|
||||
@@ -278,90 +131,120 @@ fn main() -> Result<(), Error> {
|
||||
}
|
||||
|
||||
// Validate output format usage
|
||||
if let Some(output_format_str) = &args.options.output_format {
|
||||
if output_format_str != "table" && mode != KeepModes::Info && mode != KeepModes::Status && mode != KeepModes::List {
|
||||
if let Some(output_format_str) = &settings.output_format
|
||||
&& output_format_str != "table"
|
||||
&& mode != KeepModes::Info
|
||||
&& mode != KeepModes::Status
|
||||
&& mode != KeepModes::StatusPlugins
|
||||
&& mode != KeepModes::List
|
||||
{
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"--output-format can only be used with --info, --status, or --list modes"
|
||||
"--output-format can only be used with --info, --status, --status-plugins, or --list modes"
|
||||
).exit();
|
||||
}
|
||||
}
|
||||
|
||||
// Validate human-readable usage
|
||||
if args.options.human_readable && mode != KeepModes::List && mode != KeepModes::Info {
|
||||
if settings.human_readable && mode != KeepModes::List && mode != KeepModes::Info {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"--human-readable can only be used with --list and --info modes"
|
||||
).exit();
|
||||
"--human-readable can only be used with --list and --info modes",
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
|
||||
// Validate server password usage
|
||||
if args.options.server_password.is_some() && mode != KeepModes::Server {
|
||||
if settings.server_password().is_some() && mode != KeepModes::Server {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"--server-password can only be used with --server mode"
|
||||
).exit();
|
||||
"--server-password can only be used with --server mode",
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
|
||||
debug!("MAIN: args: {:?}", args);
|
||||
debug!("MAIN: ids: {:?}", ids);
|
||||
debug!("MAIN: tags: {:?}", tags);
|
||||
debug!("MAIN: mode: {:?}", mode);
|
||||
|
||||
if args.options.dir.is_none() {
|
||||
match proj_dirs {
|
||||
Some(proj_dirs) => args.options.dir = Some(proj_dirs.data_dir().to_path_buf()),
|
||||
None => return Err(anyhow!("Unable to determine data directory")),
|
||||
}
|
||||
}
|
||||
debug!("MAIN: settings: {:?}", settings);
|
||||
|
||||
unsafe {
|
||||
libc::umask(0o077);
|
||||
}
|
||||
|
||||
let data_path = args.options.dir.clone().unwrap();
|
||||
let data_path = settings.dir.clone();
|
||||
let mut db_path = data_path.clone();
|
||||
db_path.push("keep-1.db");
|
||||
|
||||
debug!("MAIN: Data directory: {:?}", data_path);
|
||||
debug!("MAIN: DB file: {:?}", db_path);
|
||||
|
||||
fs::create_dir_all(data_path.clone()).context("Problem creating data directory")?;
|
||||
debug!("MAIN: Data directory created or already exists");
|
||||
// Ensure data directory exists
|
||||
fs::create_dir_all(&data_path)
|
||||
.with_context(|| format!("Unable to create data directory {:?}", data_path))?;
|
||||
|
||||
let mut conn = db::open(db_path.clone()).context("Problem opening database")?;
|
||||
debug!("MAIN: DB opened successfully");
|
||||
// Initialize database
|
||||
let mut conn = db::open(db_path.clone())?;
|
||||
|
||||
// Parse filter chain early for better error reporting
|
||||
let filter_chain = if let Some(filter_str) = &args.item.filters {
|
||||
match keep::filter_plugin::parse_filter_string(filter_str) {
|
||||
Ok(chain) => Some(chain),
|
||||
Err(e) => {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
format!("Invalid filter string: {}", e),
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match mode {
|
||||
KeepModes::Save => {
|
||||
crate::modes::save::mode_save(&mut cmd, &args, ids, tags, &mut conn, data_path)?
|
||||
}
|
||||
KeepModes::Get => {
|
||||
crate::modes::get::mode_get(&mut cmd, &args, ids, tags, &mut conn, data_path)?
|
||||
}
|
||||
KeepModes::Diff => {
|
||||
crate::modes::diff::mode_diff(&mut cmd, &args, ids, tags, &mut conn, data_path)?
|
||||
modes::save::mode_save(&mut cmd, &settings, ids, tags, &mut conn, data_path)
|
||||
}
|
||||
KeepModes::Get => modes::get::mode_get(
|
||||
&mut cmd,
|
||||
&settings,
|
||||
ids,
|
||||
tags,
|
||||
&mut conn,
|
||||
data_path,
|
||||
filter_chain,
|
||||
),
|
||||
KeepModes::Diff => modes::diff::mode_diff(&mut cmd, &args, &mut conn),
|
||||
KeepModes::List => {
|
||||
crate::modes::list::mode_list(&mut cmd, &args, ids, tags, &mut conn, data_path)?
|
||||
}
|
||||
KeepModes::Update => {
|
||||
crate::modes::update::mode_update(&mut cmd, &args, ids, tags, &mut conn, data_path)?
|
||||
modes::list::mode_list(&mut cmd, &settings, ids, tags, &mut conn, data_path)
|
||||
}
|
||||
KeepModes::Delete => modes::delete::mode_delete(
|
||||
&mut cmd, &settings, &settings, ids, tags, &mut conn, data_path,
|
||||
),
|
||||
KeepModes::Info => {
|
||||
crate::modes::info::mode_info(&mut cmd, &args, ids, tags, &mut conn, data_path)?
|
||||
modes::info::mode_info(&mut cmd, &settings, ids, tags, &mut conn, data_path)
|
||||
}
|
||||
KeepModes::Delete => {
|
||||
crate::modes::delete::mode_delete(&mut cmd, &args, ids, tags, &mut conn, data_path)?
|
||||
}
|
||||
KeepModes::Status => {
|
||||
crate::modes::status::mode_status(&mut cmd, &args, data_path, db_path)?
|
||||
KeepModes::Status => modes::status::mode_status(&mut cmd, &settings, data_path, db_path),
|
||||
KeepModes::StatusPlugins => {
|
||||
modes::status_plugins::mode_status_plugins(&mut cmd, &settings, data_path, db_path)
|
||||
}
|
||||
KeepModes::Server => {
|
||||
crate::modes::server::mode_server(&mut cmd, &args, &mut conn, data_path)?
|
||||
#[cfg(feature = "server")]
|
||||
{
|
||||
modes::server::mode_server(&mut cmd, &settings, &mut conn, data_path)
|
||||
}
|
||||
_ => todo!(),
|
||||
#[cfg(not(feature = "server"))]
|
||||
{
|
||||
cmd.error(
|
||||
ErrorKind::MissingRequiredArgument,
|
||||
"This binary was not compiled with server support. Recompile with --features server"
|
||||
).exit();
|
||||
}
|
||||
}
|
||||
KeepModes::GenerateConfig => {
|
||||
modes::generate_config::mode_generate_config(&mut cmd, &settings)
|
||||
}
|
||||
KeepModes::Unknown => unreachable!(),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use std::io;
|
||||
use std::io::Write;
|
||||
|
||||
pub mod program;
|
||||
pub mod digest;
|
||||
pub mod system;
|
||||
|
||||
|
||||
use crate::meta_plugin::program::MetaPluginProgram;
|
||||
use crate::meta_plugin::digest::{DigestSha256MetaPlugin, ReadTimeMetaPlugin, ReadRateMetaPlugin};
|
||||
use crate::meta_plugin::system::{CwdMetaPlugin, BinaryMetaPlugin, UidMetaPlugin, UserMetaPlugin, GidMetaPlugin, GroupMetaPlugin, ShellMetaPlugin, ShellPidMetaPlugin, KeepPidMetaPlugin, HostnameMetaPlugin, FullHostnameMetaPlugin};
|
||||
|
||||
// Every built-in metadata plugin kind. The strum derives allow iterating
// all variants (EnumIter), printing names (Display), and parsing names
// case-insensitively from strings (EnumString). The mapping from variant
// to implementation lives in get_meta_plugin() below.
#[derive(Debug, Eq, PartialEq, Clone, strum::EnumIter, strum::Display, strum::EnumString)]
#[strum(ascii_case_insensitive)]
pub enum MetaPluginType {
    FileMagic,    // external `file -bE`
    FileMime,     // external `file -b --mime-type`
    FileEncoding, // external `file -b --mime-encoding`
    LineCount,    // external `wc -l`
    WordCount,    // external `wc -w`
    Cwd,
    Binary,
    Uid,
    User,
    Gid,
    Group,
    Shell,
    ShellPid,
    KeepPid,
    DigestSha256,
    DigestMd5,    // external `md5sum`
    ReadTime,
    ReadRate,
    Hostname,
    FullHostname,
}
|
||||
|
||||
// Interface implemented by every metadata plugin: the plugin is fed the
// item's bytes through update() and yields a single metadata string
// from finalize(), stored under the key returned by meta_name().
pub trait MetaPlugin {
    // Whether the plugin can run on this system. Defaults to true.
    // NOTE(review): presumably program-backed plugins override this when
    // their external command is missing — confirm in implementations.
    fn is_supported(&self) -> bool {
        true
    }

    // Marks plugins that compute their value in-process; defaults to
    // false. NOTE(review): consumers of this flag are not visible here.
    fn is_internal(&self) -> bool {
        false
    }

    // Create a writer that will receive the item's byte stream.
    fn create(&self) -> Result<Box<dyn Write>>;
    // Produce the final metadata value once all data has been seen.
    fn finalize(&mut self) -> io::Result<String>;

    // Update the meta plugin with new data
    fn update(&mut self, data: &[u8]);

    // Key under which the finalize() result is stored.
    fn meta_name(&mut self) -> String;

    // Get program information for display in status
    fn program_info(&self) -> Option<(&str, Vec<&str>)> {
        None
    }
}
|
||||
|
||||
pub fn get_meta_plugin(meta_plugin_type: MetaPluginType) -> Box<dyn MetaPlugin> {
|
||||
match meta_plugin_type {
|
||||
MetaPluginType::FileMagic => Box::new(MetaPluginProgram::new("file", vec!["-bE", "-"], "file_magic".to_string(), true)),
|
||||
MetaPluginType::FileMime => Box::new(MetaPluginProgram::new("file", vec!["-b", "--mime-type", "-"], "file_mime".to_string(), true)),
|
||||
MetaPluginType::FileEncoding => Box::new(MetaPluginProgram::new("file", vec!["-b", "--mime-encoding", "-"], "file_encoding".to_string(), true)),
|
||||
MetaPluginType::LineCount => Box::new(MetaPluginProgram::new("wc", vec!["-l"], "line_count".to_string(), true)),
|
||||
MetaPluginType::WordCount => Box::new(MetaPluginProgram::new("wc", vec!["-w"], "word_count".to_string(), true)),
|
||||
MetaPluginType::Cwd => Box::new(CwdMetaPlugin::new()),
|
||||
MetaPluginType::Binary => Box::new(BinaryMetaPlugin::new()),
|
||||
MetaPluginType::Uid => Box::new(UidMetaPlugin::new()),
|
||||
MetaPluginType::User => Box::new(UserMetaPlugin::new()),
|
||||
MetaPluginType::Gid => Box::new(GidMetaPlugin::new()),
|
||||
MetaPluginType::Group => Box::new(GroupMetaPlugin::new()),
|
||||
MetaPluginType::Shell => Box::new(ShellMetaPlugin::new()),
|
||||
MetaPluginType::ShellPid => Box::new(ShellPidMetaPlugin::new()),
|
||||
MetaPluginType::KeepPid => Box::new(KeepPidMetaPlugin::new()),
|
||||
MetaPluginType::DigestSha256 => Box::new(DigestSha256MetaPlugin::new()),
|
||||
MetaPluginType::DigestMd5 => Box::new(MetaPluginProgram::new("md5sum", vec![], "digest_md5".to_string(), true)),
|
||||
MetaPluginType::ReadTime => Box::new(ReadTimeMetaPlugin::new()),
|
||||
MetaPluginType::ReadRate => Box::new(ReadRateMetaPlugin::new()),
|
||||
MetaPluginType::Hostname => Box::new(HostnameMetaPlugin::new()),
|
||||
MetaPluginType::FullHostname => Box::new(FullHostnameMetaPlugin::new()),
|
||||
}
|
||||
}
|
||||
|
||||
128
src/meta_plugin/cwd.rs
Normal file
128
src/meta_plugin/cwd.rs
Normal file
@@ -0,0 +1,128 @@
|
||||
use crate::meta_plugin::{MetaPlugin, MetaPluginType};
|
||||
use std::env;
|
||||
|
||||
// Metadata plugin that records the process's current working directory.
#[derive(Debug, Clone, Default)]
pub struct CwdMetaPlugin {
    // Set once finalize() has run; guards against double processing.
    is_finalized: bool,
    // Shared option/output storage common to meta plugins.
    base: crate::meta_plugin::BaseMetaPlugin,
}
|
||||
|
||||
impl CwdMetaPlugin {
|
||||
pub fn new(
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> CwdMetaPlugin {
|
||||
let mut base = crate::meta_plugin::BaseMetaPlugin::new();
|
||||
|
||||
// Set default outputs
|
||||
let default_outputs = vec!["cwd".to_string()];
|
||||
for output_name in default_outputs {
|
||||
base.outputs
|
||||
.insert(output_name.clone(), serde_yaml::Value::String(output_name));
|
||||
}
|
||||
|
||||
// Apply provided options and outputs
|
||||
if let Some(opts) = options {
|
||||
for (key, value) in opts {
|
||||
base.options.insert(key, value);
|
||||
}
|
||||
}
|
||||
if let Some(outs) = outputs {
|
||||
for (key, value) in outs {
|
||||
base.outputs.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
CwdMetaPlugin {
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for CwdMetaPlugin {
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::Cwd
|
||||
}
|
||||
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
let cwd = match env::current_dir() {
|
||||
Ok(path) => path.to_string_lossy().to_string(),
|
||||
Err(_) => "unknown".to_string(),
|
||||
};
|
||||
|
||||
// Use process_metadata_outputs to handle output mapping
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"cwd",
|
||||
serde_yaml::Value::String(cwd),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;

// Register the plugin at module initialization time.
//
// The `ctor` attribute runs this before main(), installing a factory
// that builds a CwdMetaPlugin from caller-provided options/outputs maps.
#[ctor::ctor]
fn register_cwd_plugin() {
    register_meta_plugin(MetaPluginType::Cwd, |options, outputs| {
        Box::new(CwdMetaPlugin::new(options, outputs))
    });
}
||||
@@ -1,159 +1,267 @@
|
||||
use anyhow::Result;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::io;
|
||||
use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginType};
|
||||
use md5;
|
||||
use sha2::{Digest, Sha256, Sha512};
|
||||
use std::io::Write;
|
||||
use std::time::Instant;
|
||||
|
||||
use crate::meta_plugin::MetaPlugin;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct DigestSha256MetaPlugin {
|
||||
hasher: Sha256,
|
||||
meta_name: String,
|
||||
// Supported digest backends, wrapped in one enum so DigestMetaPlugin
// can hold any of them behind a single field. Debug is implemented
// manually below because md5::Context does not derive it.
#[derive(Clone)]
enum Hasher {
    Sha256(Sha256),
    Md5(md5::Context),
    Sha512(Sha512),
}
|
||||
|
||||
impl DigestSha256MetaPlugin {
|
||||
pub fn new() -> DigestSha256MetaPlugin {
|
||||
DigestSha256MetaPlugin {
|
||||
hasher: Sha256::new(),
|
||||
meta_name: "digest_sha256".to_string(),
|
||||
impl Default for Hasher {
    // SHA-256 is the algorithm used when none is configured explicitly.
    fn default() -> Self {
        Hasher::Sha256(Sha256::default())
    }
}
|
||||
|
||||
// Manual Debug implementation to avoid md5::Context not implementing Debug
|
||||
impl std::fmt::Debug for Hasher {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Hasher::Sha256(_) => write!(f, "Hasher::Sha256"),
|
||||
Hasher::Md5(_) => write!(f, "Hasher::Md5"),
|
||||
Hasher::Sha512(_) => write!(f, "Hasher::Sha512"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for DigestSha256MetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
// For meta plugins, we don't actually create a writer since we're buffering data internally
|
||||
// This method is required by the trait but not used in the same way as digest engines
|
||||
Ok(Box::new(DummyWriter))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
let result = self.hasher.clone().finalize();
|
||||
Ok(format!("{:x}", result))
|
||||
}
|
||||
|
||||
impl Hasher {
|
||||
fn update(&mut self, data: &[u8]) {
|
||||
self.hasher.update(data);
|
||||
match self {
|
||||
Hasher::Sha256(hasher) => hasher.update(data),
|
||||
Hasher::Md5(hasher) => {
|
||||
let _ = hasher.write(data);
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
Hasher::Sha512(hasher) => hasher.update(data),
|
||||
}
|
||||
}
|
||||
|
||||
// Dummy writer that implements Write but doesn't do anything
|
||||
// This is needed to satisfy the MetaPlugin trait requirements
|
||||
struct DummyWriter;
|
||||
|
||||
impl Write for DummyWriter {
|
||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||
Ok(buf.len())
|
||||
fn finalize(&mut self) -> String {
|
||||
match self {
|
||||
Hasher::Sha256(hasher) => {
|
||||
let result = std::mem::replace(hasher, Sha256::new()).finalize_reset();
|
||||
format!("{:x}", result)
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> io::Result<()> {
|
||||
Ok(())
|
||||
Hasher::Md5(hasher) => {
|
||||
let result = hasher.clone().compute();
|
||||
format!("{:x}", result)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct ReadTimeMetaPlugin {
|
||||
start_time: Option<Instant>,
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl ReadTimeMetaPlugin {
|
||||
pub fn new() -> ReadTimeMetaPlugin {
|
||||
ReadTimeMetaPlugin {
|
||||
start_time: None,
|
||||
meta_name: "read_time".to_string(),
|
||||
Hasher::Sha512(hasher) => {
|
||||
let result = std::mem::replace(hasher, Sha512::new()).finalize_reset();
|
||||
format!("{:x}", result)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ReadTimeMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
// For meta plugins, we don't actually create a writer since we're buffering data internally
|
||||
Ok(Box::new(DummyWriter))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
if let Some(start_time) = self.start_time {
|
||||
let duration = start_time.elapsed();
|
||||
Ok(format!("{:.6}s", duration.as_secs_f64()))
|
||||
} else {
|
||||
Ok("0.000000s".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
if self.start_time.is_none() {
|
||||
self.start_time = Some(Instant::now());
|
||||
}
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct ReadRateMetaPlugin {
|
||||
start_time: Option<Instant>,
|
||||
bytes_read: u64,
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl ReadRateMetaPlugin {
|
||||
pub fn new() -> ReadRateMetaPlugin {
|
||||
ReadRateMetaPlugin {
|
||||
start_time: None,
|
||||
bytes_read: 0,
|
||||
meta_name: "read_rate".to_string(),
|
||||
fn output_name(&self) -> &'static str {
|
||||
match self {
|
||||
Hasher::Sha256(_) => "digest_sha256",
|
||||
Hasher::Md5(_) => "digest_md5",
|
||||
Hasher::Sha512(_) => "digest_sha512",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ReadRateMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DigestMetaPlugin {
|
||||
hasher: Option<Hasher>,
|
||||
is_finalized: bool,
|
||||
base: BaseMetaPlugin,
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
// For meta plugins, we don't actually create a writer since we're buffering data internally
|
||||
Ok(Box::new(DummyWriter))
|
||||
impl DigestMetaPlugin {
|
||||
pub fn new(
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> DigestMetaPlugin {
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
|
||||
// Apply provided options
|
||||
if let Some(opts) = options {
|
||||
for (key, value) in opts {
|
||||
base.options.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
if let Some(start_time) = self.start_time {
|
||||
let duration = start_time.elapsed();
|
||||
if duration.as_secs_f64() > 0.0 {
|
||||
let rate = self.bytes_read as f64 / duration.as_secs_f64();
|
||||
Ok(format!("{:.0} B/s", rate))
|
||||
} else {
|
||||
Ok("0 B/s".to_string())
|
||||
// Determine the selected method
|
||||
let method = if let Some(method_value) = base.options.get("method") {
|
||||
if let Some(method_str) = method_value.as_str() {
|
||||
match method_str {
|
||||
"md5" => "md5",
|
||||
"sha256" => "sha256",
|
||||
"sha512" => "sha512",
|
||||
_ => "sha256",
|
||||
}
|
||||
} else {
|
||||
Ok("0 B/s".to_string())
|
||||
"sha256"
|
||||
}
|
||||
} else {
|
||||
"sha256"
|
||||
};
|
||||
|
||||
// Initialize the hasher based on the method
|
||||
let hasher = match method {
|
||||
"md5" => Some(Hasher::Md5(md5::Context::new())),
|
||||
"sha256" => Some(Hasher::Sha256(Sha256::new())),
|
||||
"sha512" => Some(Hasher::Sha512(Sha512::new())),
|
||||
_ => Some(Hasher::Sha256(Sha256::new())),
|
||||
};
|
||||
|
||||
// Add the method to options so it shows up in the status
|
||||
base.options.insert(
|
||||
"method".to_string(),
|
||||
serde_yaml::Value::String(method.to_string()),
|
||||
);
|
||||
|
||||
// Set outputs based on the selected hash method
|
||||
// Only the selected method's output should be enabled, others should be None
|
||||
let all_outputs = vec!["digest_md5", "digest_sha256", "digest_sha512"];
|
||||
for output_name in &all_outputs {
|
||||
if output_name == &format!("digest_{}", method) {
|
||||
base.outputs.insert(
|
||||
output_name.to_string(),
|
||||
serde_yaml::Value::String(output_name.to_string()),
|
||||
);
|
||||
} else {
|
||||
base.outputs
|
||||
.insert(output_name.to_string(), serde_yaml::Value::Null);
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, data: &[u8]) {
|
||||
if self.start_time.is_none() {
|
||||
self.start_time = Some(Instant::now());
|
||||
// Apply provided outputs, but only for enabled outputs
|
||||
if let Some(outs) = outputs {
|
||||
for (key, value) in outs {
|
||||
// Only update if the output is not disabled (not None)
|
||||
if let Some(current_value) = base.outputs.get_mut(&key)
|
||||
&& !current_value.is_null()
|
||||
{
|
||||
*current_value = value;
|
||||
}
|
||||
}
|
||||
self.bytes_read += data.len() as u64;
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
DigestMetaPlugin {
|
||||
hasher,
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for DigestMetaPlugin {
    /// Whether `finalize()` has already produced the digest.
    fn is_finalized(&self) -> bool {
        self.is_finalized
    }

    /// Overrides the finalized flag (driver-controlled reset/force).
    fn set_finalized(&mut self, finalized: bool) {
        self.is_finalized = finalized;
    }

    /// No work at stream start; hashing happens incrementally in `update()`.
    fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
        crate::meta_plugin::MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Produces the final digest string for the selected hash method and maps
    /// it through the configured outputs; every non-selected digest output is
    /// set to Null. Idempotent: a second call returns an empty, finalized
    /// response without touching the hasher again.
    fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
        if self.is_finalized {
            return crate::meta_plugin::MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        let mut metadata = Vec::new();

        // Update outputs based on the selected hash method
        if let Some(hasher) = &mut self.hasher {
            let hash_value = hasher.finalize();
            let output_name = hasher.output_name();

            // Use process_metadata_outputs to handle output mapping
            if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
                output_name,
                serde_yaml::Value::String(hash_value),
                self.base.outputs(),
            ) {
                metadata.push(meta_data);
            }

            // Set all other digest outputs to None so only the active method's
            // digest appears in the status/output set.
            let all_outputs = vec!["digest_md5", "digest_sha256", "digest_sha512"];
            for output_name in all_outputs {
                if output_name != hasher.output_name() {
                    self.base
                        .outputs
                        .insert(output_name.to_string(), serde_yaml::Value::Null);
                }
            }
        }

        self.is_finalized = true;
        crate::meta_plugin::MetaPluginResponse {
            metadata,
            is_finalized: true,
        }
    }

    /// Feeds one chunk of stream data into the active hasher; no-op after
    /// finalization.
    fn update(&mut self, data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
        if self.is_finalized {
            return crate::meta_plugin::MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        // Update the active hasher
        if let Some(hasher) = &mut self.hasher {
            hasher.update(data);
        }

        crate::meta_plugin::MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    fn meta_type(&self) -> MetaPluginType {
        MetaPluginType::Digest
    }

    fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs()
    }

    fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs_mut()
    }

    /// All digest output names this plugin can emit (only one is active per
    /// instance, chosen by the `method` option).
    fn default_outputs(&self) -> Vec<String> {
        vec![
            "digest_md5".to_string(),
            "digest_sha256".to_string(),
            "digest_sha512".to_string(),
        ]
    }

    fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options()
    }

    fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options_mut()
    }
}
|
||||
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_digest_plugin() {
|
||||
register_meta_plugin(MetaPluginType::Digest, |options, outputs| {
|
||||
Box::new(DigestMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
|
||||
227
src/meta_plugin/env.rs
Normal file
227
src/meta_plugin/env.rs
Normal file
@@ -0,0 +1,227 @@
|
||||
use super::{BaseMetaPlugin, MetaPlugin, MetaPluginType, process_metadata_outputs};
|
||||
|
||||
#[derive(Debug, Clone)]
/// Meta plugin that extracts environment variables prefixed with KEEP_META_ as metadata.
pub struct EnvMetaPlugin {
    // Set once initialize()/finalize() has emitted the env metadata;
    // this plugin only runs once per item.
    is_finalized: bool,
    // Shared plugin state: options and output-name mappings.
    base: BaseMetaPlugin,
    // (stripped_name, value) pairs snapshotted from KEEP_META_* at
    // construction time.
    env_vars: Vec<(String, String)>,
}
|
||||
|
||||
impl EnvMetaPlugin {
|
||||
/// Creates a new `EnvMetaPlugin` instance.
|
||||
///
|
||||
/// Collects environment variables starting with KEEP_META_ and sets up default output mappings.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_options` - Optional configuration options for the plugin (unused in this implementation).
|
||||
/// * `outputs` - Optional output mappings for metadata (overrides defaults).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new instance of `EnvMetaPlugin`.
|
||||
pub fn new(
|
||||
_options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> Self {
|
||||
// Collect environment variables starting with KEEP_META_
|
||||
let mut env_vars = Vec::new();
|
||||
let mut outputs_map = std::collections::HashMap::new();
|
||||
|
||||
for (key, value) in std::env::vars() {
|
||||
if let Some(stripped_key) = key.strip_prefix("KEEP_META_") {
|
||||
// Add to env_vars to process later
|
||||
env_vars.push((stripped_key.to_string(), value));
|
||||
// Add to outputs with default mapping to the stripped name
|
||||
outputs_map.insert(
|
||||
stripped_key.to_string(),
|
||||
serde_yaml::Value::String(stripped_key.to_string()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Override with provided outputs
|
||||
if let Some(provided_outputs) = outputs {
|
||||
for (key, value) in provided_outputs {
|
||||
outputs_map.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
base.outputs = outputs_map;
|
||||
|
||||
EnvMetaPlugin {
|
||||
is_finalized: false,
|
||||
base,
|
||||
env_vars,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for EnvMetaPlugin {
|
||||
/// Returns the type of this meta plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `MetaPluginType::Env`.
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::Env
|
||||
}
|
||||
|
||||
/// Checks if the plugin has been finalized.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `true` if finalized, `false` otherwise.
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
/// Sets the finalized state of the plugin.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `finalized` - The new finalized state.
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
/// Initializes the plugin, processing environment variables.
|
||||
///
|
||||
/// Processes all KEEP_META_* variables and generates metadata using output mappings.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with environment metadata and finalized state set to `true`.
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Process all collected environment variables
|
||||
let mut metadata = Vec::new();
|
||||
for (name, value) in &self.env_vars {
|
||||
if let Some(meta_data) = process_metadata_outputs(
|
||||
name,
|
||||
serde_yaml::Value::String(value.clone()),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
|
||||
// Mark as finalized since this plugin only needs to run once
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Updates the plugin with new data (unused in this implementation).
|
||||
///
|
||||
/// This plugin does not process streaming data; returns empty response.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_data` - The data chunk (unused).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with empty metadata and current finalized state.
|
||||
fn update(&mut self, _data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Finalizes the plugin, calling initialize if not already done.
|
||||
///
|
||||
/// Ensures environment metadata is processed if not previously initialized.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with environment metadata if not finalized, or empty if already done.
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If not already finalized, we can call initialize
|
||||
if !self.is_finalized {
|
||||
return self.initialize();
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of outputs.
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of outputs.
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
/// Returns the default output names based on collected env vars.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of environment variable names (stripped of KEEP_META_ prefix).
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
self.env_vars.iter().map(|(name, _)| name.clone()).collect()
|
||||
}
|
||||
|
||||
/// Returns a reference to the options mapping (empty for this plugin).
|
||||
///
|
||||
/// This plugin has no configurable options.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// An empty `HashMap`.
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the options mapping.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics with "options_mut() not implemented for EnvMetaPlugin".
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
/// Registers the EnvMetaPlugin with the global registry at module initialization.
|
||||
#[ctor::ctor]
|
||||
fn register_env_plugin() {
|
||||
register_meta_plugin(MetaPluginType::Env, |options, outputs| {
|
||||
Box::new(EnvMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
306
src/meta_plugin/exec.rs
Normal file
306
src/meta_plugin/exec.rs
Normal file
@@ -0,0 +1,306 @@
|
||||
use log::*;
|
||||
use std::io::{self, Write};
|
||||
use std::process::{Child, Command, Stdio};
|
||||
use which::which;
|
||||
|
||||
use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginResponse, MetaPluginType};
|
||||
|
||||
/// External program execution meta plugin.
|
||||
///
|
||||
/// This plugin executes a specified external command during item save operations,
|
||||
/// capturing its output as metadata. It supports piping input data to the command's stdin
|
||||
/// and processing stdout. Useful for dynamic metadata generation via shell commands.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Configured via options like `command: "date"`, the plugin runs `date` and captures output as metadata.
|
||||
pub struct MetaPluginExec {
    /// Executable name or path to run (availability checked via `which`).
    pub program: String,
    /// Arguments passed to the program.
    pub args: Vec<String>,
    /// True when the program was found on PATH at construction time.
    pub supported: bool,
    /// If true, only the first whitespace-separated token of stdout is kept;
    /// otherwise the full trimmed stdout is used.
    pub split_whitespace: bool,
    // Spawned child process, alive between initialize() and finalize().
    process: Option<Child>,
    // Handle to the child's stdin; dropped in finalize() to signal EOF.
    writer: Option<Box<dyn Write>>,
    // Post-processed stdout of the command, set in finalize() on success.
    result: Option<String>,
    // Shared plugin state: options and output-name mappings.
    base: BaseMetaPlugin,
}
|
||||
|
||||
// Manual Debug implementation because Box<dyn Write> doesn't implement Debug
|
||||
/// Custom Debug implementation for MetaPluginExec.
|
||||
///
|
||||
/// Obfuscates the writer field since Box<dyn Write> does not implement Debug.
|
||||
impl std::fmt::Debug for MetaPluginExec {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MetaPluginExec")
            .field("program", &self.program)
            .field("args", &self.args)
            .field("supported", &self.supported)
            .field("split_whitespace", &self.split_whitespace)
            .field("process", &self.process)
            // Box<dyn Write> is not Debug; show only whether a writer exists.
            .field("writer", &self.writer.as_ref().map(|_| "Box<dyn Write>"))
            .field("result", &self.result)
            .field("base", &self.base)
            .finish()
    }
}
|
||||
|
||||
impl MetaPluginExec {
    /// Creates a new MetaPluginExec instance.
    ///
    /// Validates the program availability using `which` and initializes outputs and options.
    /// The meta_name determines the default output key for captured command output.
    ///
    /// # Arguments
    ///
    /// * `program` - The executable name or path to run.
    /// * `args` - Slice of arguments to pass to the program.
    /// * `meta_name` - Name for the metadata output key.
    /// * `split_whitespace` - If true, takes the first whitespace-separated word from output; otherwise, trims full output.
    /// * `_options` - Optional configuration options (forwarded to the base plugin's initialization).
    /// * `outputs` - Optional output mappings to override defaults.
    ///
    /// # Returns
    ///
    /// * `MetaPluginExec` - New plugin instance, with `supported` set based on program availability.
    pub fn new(
        program: &str,
        args: &[String],
        meta_name: String,
        split_whitespace: bool,
        _options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
        outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
    ) -> MetaPluginExec {
        // Support is decided once, up front: the program must resolve on PATH.
        let supported = which(program).is_ok();

        let mut base = BaseMetaPlugin::new();

        // Set default output: captured stdout is published under `meta_name`.
        let default_outputs = &[meta_name.as_str()];
        base.initialize_plugin(default_outputs, &_options, &outputs);

        MetaPluginExec {
            program: program.to_string(),
            args: args.to_vec(),
            supported,
            split_whitespace,
            process: None,
            writer: None,
            result: None,
            base,
        }
    }

    /// Starts the external process if not already running.
    ///
    /// Spawns the command with piped stdin/stdout/stderr and stores the child
    /// process plus its stdin writer. If the program is unsupported or the
    /// spawn fails, the response is marked finalized so no further work is
    /// attempted for this plugin.
    ///
    /// # Returns
    ///
    /// * `MetaPluginResponse` - Empty response; `is_finalized` is true only on
    ///   the unsupported/spawn-failure paths.
    fn start_process(&mut self) -> MetaPluginResponse {
        // Already running: nothing to do.
        if self.process.is_some() {
            return MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: false,
            };
        }

        if !self.supported {
            debug!(
                "META: Exec plugin: program '{}' not supported",
                self.program
            );
            return MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        let mut cmd = Command::new(&self.program);
        cmd.args(&self.args)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped());

        match cmd.spawn() {
            Ok(mut child) => {
                // stdin was configured as piped above, so take() cannot be
                // None here; the unwrap is safe.
                let stdin = child.stdin.take().unwrap();
                self.writer = Some(Box::new(stdin));
                self.process = Some(child);
                debug!("META: Exec plugin: started process for '{}'", self.program);
                MetaPluginResponse {
                    metadata: Vec::new(),
                    is_finalized: false,
                }
            }
            Err(e) => {
                error!(
                    "META: Exec plugin: failed to start '{}': {}",
                    self.program, e
                );
                MetaPluginResponse {
                    metadata: Vec::new(),
                    is_finalized: true,
                }
            }
        }
    }
}
|
||||
|
||||
impl MetaPlugin for MetaPluginExec {
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::Exec
|
||||
}
|
||||
|
||||
fn is_supported(&self) -> bool {
|
||||
self.supported
|
||||
}
|
||||
|
||||
fn is_internal(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn initialize(&mut self) -> MetaPluginResponse {
|
||||
self.start_process()
|
||||
}
|
||||
|
||||
fn update(&mut self, data: &[u8]) -> MetaPluginResponse {
|
||||
if let Some(writer) = self.writer.as_mut()
|
||||
&& let Err(e) = writer.write_all(data)
|
||||
{
|
||||
error!("META: Exec plugin: failed to write to stdin: {}", e);
|
||||
}
|
||||
MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> MetaPluginResponse {
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Close stdin if writer exists
|
||||
drop(self.writer.take());
|
||||
|
||||
// Wait for process to complete and capture output
|
||||
if let Some(child) = self.process.take() {
|
||||
match child.wait_with_output() {
|
||||
Ok(output) => {
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let result = if self.split_whitespace {
|
||||
stdout
|
||||
.split_whitespace()
|
||||
.next()
|
||||
.unwrap_or(&stdout)
|
||||
.to_string()
|
||||
} else {
|
||||
stdout.trim().to_string()
|
||||
};
|
||||
|
||||
self.result = Some(result.clone());
|
||||
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
self.base
|
||||
.outputs()
|
||||
.keys()
|
||||
.next()
|
||||
.unwrap_or(&"exec".to_string()),
|
||||
serde_yaml::Value::String(result),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
} else {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
error!("META: Exec plugin: command failed: {}", stderr);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("META: Exec plugin: failed to wait on process: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn program_info(&self) -> Option<(&str, Vec<&str>)> {
|
||||
let args_str: Vec<&str> = self.args.iter().map(|s| s.as_str()).collect();
|
||||
Some((&self.program, args_str))
|
||||
}
|
||||
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&self.base.outputs
|
||||
}
|
||||
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&mut self.base.outputs
|
||||
}
|
||||
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&self.base.options
|
||||
}
|
||||
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&mut self.base.options
|
||||
}
|
||||
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec!["exec".to_string()]
|
||||
}
|
||||
}
|
||||
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_exec_plugin() {
|
||||
register_meta_plugin(MetaPluginType::Exec, |options, outputs| {
|
||||
// Parse command from options for registration
|
||||
let mut program_name = String::new();
|
||||
let mut args = Vec::new();
|
||||
let mut meta_name = "exec".to_string();
|
||||
let mut split_whitespace = false;
|
||||
|
||||
if let Some(opts) = &options {
|
||||
if let Some(command_value) = opts.get("command")
|
||||
&& let Some(command_str) = command_value.as_str()
|
||||
{
|
||||
let parts: Vec<&str> = command_str.split_whitespace().collect();
|
||||
if !parts.is_empty() {
|
||||
program_name = parts[0].to_string();
|
||||
args = parts[1..].iter().map(|s| s.to_string()).collect();
|
||||
}
|
||||
}
|
||||
if let Some(split_value) = opts.get("split_whitespace")
|
||||
&& let Some(split_bool) = split_value.as_bool()
|
||||
{
|
||||
split_whitespace = split_bool;
|
||||
}
|
||||
if let Some(name_value) = opts.get("name")
|
||||
&& let Some(name_str) = name_value.as_str()
|
||||
{
|
||||
meta_name = name_str.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
Box::new(MetaPluginExec::new(
|
||||
&program_name,
|
||||
&args,
|
||||
meta_name,
|
||||
split_whitespace,
|
||||
options,
|
||||
outputs,
|
||||
))
|
||||
});
|
||||
}
|
||||
406
src/meta_plugin/hostname.rs
Normal file
406
src/meta_plugin/hostname.rs
Normal file
@@ -0,0 +1,406 @@
|
||||
use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginType};
|
||||
|
||||
use smart_default::SmartDefault;
|
||||
|
||||
#[derive(Debug, Clone, SmartDefault)]
/// Meta plugin that reports the machine's hostname (plain, fully-qualified,
/// and short forms) as metadata.
pub struct HostnameMetaPlugin {
    // True once initialize()/finalize() has run; later calls become no-ops.
    #[default = false]
    is_finalized: bool,
    // Shared plugin state: options and output-name mappings.
    base: BaseMetaPlugin,
}
|
||||
|
||||
impl HostnameMetaPlugin {
    /// Builds a `HostnameMetaPlugin` from optional options and output mappings.
    ///
    /// Options `hostname`, `hostname_full`, and `hostname_short` are booleans
    /// (all defaulting to true) that enable or disable the corresponding
    /// output. Disabled outputs are recorded as Null; enabled ones default to
    /// mapping to their own name, and caller-provided `outputs` override
    /// enabled entries only (unknown keys pass through unconditionally).
    pub fn new(
        options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
        outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
    ) -> HostnameMetaPlugin {
        let mut base = BaseMetaPlugin::new();

        // Set default outputs
        let default_outputs = &["hostname", "hostname_full", "hostname_short"];
        base.initialize_plugin(default_outputs, &options, &outputs);

        // Start with default options - hostname is now boolean only
        base.options
            .insert("hostname".to_string(), serde_yaml::Value::Bool(true));
        base.options
            .insert("hostname_full".to_string(), serde_yaml::Value::Bool(true));
        base.options
            .insert("hostname_short".to_string(), serde_yaml::Value::Bool(true));

        // Override with provided options
        if let Some(opts) = &options {
            for (key, value) in opts {
                // Convert string "true"/"false" to boolean for hostname option.
                // NOTE(review): this string-to-bool coercion applies only to
                // "hostname", not to "hostname_full"/"hostname_short" — for
                // those keys a string "false" is stored as a string, as_bool()
                // fails below, and the default of `true` wins. Confirm whether
                // that asymmetry is intentional.
                if key == "hostname"
                    && let serde_yaml::Value::String(s) = value
                {
                    if s == "false" {
                        base.options
                            .insert(key.clone(), serde_yaml::Value::Bool(false));
                        continue;
                    } else if s == "true" {
                        base.options
                            .insert(key.clone(), serde_yaml::Value::Bool(true));
                        continue;
                    }
                }
                base.options.insert(key.clone(), value.clone());
            }
        }

        // Determine which outputs are enabled based on options; missing or
        // non-boolean values count as enabled.
        let hostname_enabled = base
            .options
            .get("hostname")
            .and_then(|v| v.as_bool())
            .unwrap_or(true);

        let hostname_full_enabled = base
            .options
            .get("hostname_full")
            .and_then(|v| v.as_bool())
            .unwrap_or(true);

        let hostname_short_enabled = base
            .options
            .get("hostname_short")
            .and_then(|v| v.as_bool())
            .unwrap_or(true);

        // Start with default outputs, setting disabled ones to None
        let mut final_outputs = std::collections::HashMap::new();

        // Handle hostname output
        if hostname_enabled {
            final_outputs.insert(
                "hostname".to_string(),
                serde_yaml::Value::String("hostname".to_string()),
            );
        } else {
            final_outputs.insert("hostname".to_string(), serde_yaml::Value::Null);
        }

        // Handle hostname_full output
        if hostname_full_enabled {
            final_outputs.insert(
                "hostname_full".to_string(),
                serde_yaml::Value::String("hostname_full".to_string()),
            );
        } else {
            final_outputs.insert("hostname_full".to_string(), serde_yaml::Value::Null);
        }

        // Handle hostname_short output
        if hostname_short_enabled {
            final_outputs.insert(
                "hostname_short".to_string(),
                serde_yaml::Value::String("hostname_short".to_string()),
            );
        } else {
            final_outputs.insert("hostname_short".to_string(), serde_yaml::Value::Null);
        }

        // Override with provided outputs, but only if they're enabled
        if let Some(outs) = &outputs {
            for (key, value) in outs {
                // Only add if the output is enabled
                match key.as_str() {
                    "hostname" => {
                        if hostname_enabled {
                            final_outputs.insert(key.clone(), value.clone());
                        }
                    }
                    "hostname_full" => {
                        if hostname_full_enabled {
                            final_outputs.insert(key.clone(), value.clone());
                        }
                    }
                    "hostname_short" => {
                        if hostname_short_enabled {
                            final_outputs.insert(key.clone(), value.clone());
                        }
                    }
                    _ => {
                        final_outputs.insert(key.clone(), value.clone());
                    }
                }
            }
        }

        base.outputs = final_outputs;

        HostnameMetaPlugin {
            is_finalized: false,
            base,
        }
    }

    /// Best-effort fully-qualified hostname lookup.
    ///
    /// Resolution order: (1) forward-resolve the short hostname and reverse-
    /// look-up each returned address, accepting the first name that differs
    /// from the short one and contains a dot; (2) append the output of the
    /// `domainname` command when non-empty; (3) fall back to `hostname -f`;
    /// (4) return the short hostname unchanged. Returns "unknown" when the OS
    /// hostname is not valid UTF-8.
    fn get_hostname(&self) -> String {
        // First get the short hostname
        let short_hostname = match gethostname::gethostname().into_string() {
            Ok(hostname) => hostname,
            Err(_) => return "unknown".to_string(),
        };

        // First try DNS resolution for both IPv4 and IPv6 addresses
        // lookup_host should handle both A and AAAA records
        if let Ok(addrs_iter) = dns_lookup::lookup_host(&short_hostname) {
            // Collect addresses into a Vec to be able to use first()
            let addrs: Vec<std::net::IpAddr> = addrs_iter.collect();

            // Try each address (both IPv4 and IPv6)
            for addr in &addrs {
                // Convert to IpAddr for lookup_addr
                let ip_addr = match addr {
                    std::net::IpAddr::V4(ipv4) => std::net::IpAddr::V4(*ipv4),
                    std::net::IpAddr::V6(ipv6) => std::net::IpAddr::V6(*ipv6),
                };
                // Perform reverse lookup for each address
                match dns_lookup::lookup_addr(&ip_addr) {
                    Ok(full_hostname) => {
                        // Only use if it's different from the short hostname and looks like a FQDN
                        if full_hostname != short_hostname && full_hostname.contains('.') {
                            return full_hostname;
                        }
                    }
                    Err(_) => continue,
                }
            }

            // If no reverse lookup worked, but we have addresses, try to construct FQDN
            // from the first address's domain if the short hostname is part of a domain
            if let Some(_first_addr) = addrs.first() {
                // For local addresses, we might not get a reverse lookup, so try to infer
                // from the system's domain name
                if let Ok(domain) = std::process::Command::new("domainname").output()
                    && domain.status.success()
                {
                    let domain_str = String::from_utf8_lossy(&domain.stdout).trim().to_string();
                    if !domain_str.is_empty() && domain_str != "(none)" {
                        return format!("{}.{}", short_hostname, domain_str);
                    }
                }
            }
        }

        // Fallback: try to get the FQDN using the system's hostname resolution
        // This should give us the full hostname if configured
        if let Ok(full_hostname) = std::process::Command::new("hostname").arg("-f").output()
            && full_hostname.status.success()
        {
            let full_hostname_str = String::from_utf8_lossy(&full_hostname.stdout)
                .trim()
                .to_string();
            if !full_hostname_str.is_empty() && full_hostname_str != short_hostname {
                return full_hostname_str;
            }
        }

        // Final fallback: return the short hostname
        short_hostname
    }
}
|
||||
|
||||
impl MetaPlugin for HostnameMetaPlugin {
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::Hostname
|
||||
}
|
||||
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Get the full hostname
|
||||
let full_hostname = self.get_hostname();
|
||||
let short_hostname = full_hostname
|
||||
.split('.')
|
||||
.next()
|
||||
.unwrap_or(&full_hostname)
|
||||
.to_string();
|
||||
|
||||
// Determine which hostnames to include based on options
|
||||
let hostname_enabled = self
|
||||
.base
|
||||
.options
|
||||
.get("hostname")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(true);
|
||||
|
||||
let hostname_full_enabled = self
|
||||
.base
|
||||
.options
|
||||
.get("hostname_full")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(true);
|
||||
|
||||
let hostname_short_enabled = self
|
||||
.base
|
||||
.options
|
||||
.get("hostname_short")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(true);
|
||||
|
||||
// Always use gethostname() for the 'hostname' output when enabled
|
||||
let hostname_value = if hostname_enabled {
|
||||
gethostname::gethostname()
|
||||
.into_string()
|
||||
.unwrap_or_else(|_| "unknown".to_string())
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
// Prepare metadata to return
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Add enabled metadata to the response using process_metadata_outputs
|
||||
if hostname_enabled
|
||||
&& let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"hostname",
|
||||
serde_yaml::Value::String(hostname_value.clone()),
|
||||
self.base.outputs(),
|
||||
)
|
||||
{
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
if hostname_full_enabled
|
||||
&& let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"hostname_full",
|
||||
serde_yaml::Value::String(full_hostname.clone()),
|
||||
self.base.outputs(),
|
||||
)
|
||||
{
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
if hostname_short_enabled
|
||||
&& let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"hostname_short",
|
||||
serde_yaml::Value::String(short_hostname.clone()),
|
||||
self.base.outputs(),
|
||||
)
|
||||
{
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
|
||||
// Update outputs based on enabled status
|
||||
// Handle hostname output
|
||||
if hostname_enabled {
|
||||
if let Some(output_value) = self.base.outputs_mut().get_mut("hostname") {
|
||||
*output_value = serde_yaml::Value::String(hostname_value);
|
||||
}
|
||||
} else {
|
||||
self.base
|
||||
.outputs_mut()
|
||||
.insert("hostname".to_string(), serde_yaml::Value::Null);
|
||||
}
|
||||
|
||||
// Handle hostname_full output
|
||||
if hostname_full_enabled {
|
||||
if let Some(output_value) = self.base.outputs_mut().get_mut("hostname_full") {
|
||||
*output_value = serde_yaml::Value::String(full_hostname);
|
||||
}
|
||||
} else {
|
||||
self.base
|
||||
.outputs_mut()
|
||||
.insert("hostname_full".to_string(), serde_yaml::Value::Null);
|
||||
}
|
||||
|
||||
// Handle hostname_short output
|
||||
if hostname_short_enabled {
|
||||
if let Some(output_value) = self.base.outputs_mut().get_mut("hostname_short") {
|
||||
*output_value = serde_yaml::Value::String(short_hostname);
|
||||
}
|
||||
} else {
|
||||
self.base
|
||||
.outputs_mut()
|
||||
.insert("hostname_short".to_string(), serde_yaml::Value::Null);
|
||||
}
|
||||
|
||||
// Mark as finalized since this plugin only needs to run once
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec![
|
||||
"hostname".to_string(),
|
||||
"hostname_full".to_string(),
|
||||
"hostname_short".to_string(),
|
||||
]
|
||||
}
|
||||
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_hostname_plugin() {
|
||||
register_meta_plugin(MetaPluginType::Hostname, |options, outputs| {
|
||||
Box::new(HostnameMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
204
src/meta_plugin/keep_pid.rs
Normal file
204
src/meta_plugin/keep_pid.rs
Normal file
@@ -0,0 +1,204 @@
|
||||
use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginType};
|
||||
use std::process;
|
||||
|
||||
/// Meta plugin that captures the current process id as metadata.
#[derive(Debug, Clone, Default)]
pub struct KeepPidMetaPlugin {
    // True once the plugin has emitted its one-shot metadata.
    is_finalized: bool,
    // Shared plugin state: configured options and output mappings.
    base: BaseMetaPlugin,
}
|
||||
|
||||
impl KeepPidMetaPlugin {
|
||||
/// Creates a new `KeepPidMetaPlugin` instance.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_options` - Optional configuration options for the plugin (unused in this implementation).
|
||||
/// * `outputs` - Optional output mappings for metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new instance of `KeepPidMetaPlugin`.
|
||||
pub fn new(
|
||||
_options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> KeepPidMetaPlugin {
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
|
||||
// Set default outputs
|
||||
let default_outputs = &["keep_pid"];
|
||||
base.initialize_plugin(default_outputs, &_options, &outputs);
|
||||
|
||||
KeepPidMetaPlugin {
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for KeepPidMetaPlugin {
|
||||
/// Checks if the plugin has been finalized.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `true` if finalized, `false` otherwise.
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
/// Sets the finalized state of the plugin.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `finalized` - The new finalized state.
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
/// Finalizes the plugin, processing any remaining data if needed.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with empty metadata and finalized state set to `true`.
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Updates the plugin with new data chunk.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_data` - The data chunk (unused in this implementation).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with empty metadata and finalized state.
|
||||
fn update(&mut self, _data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the type of this meta plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `MetaPluginType::KeepPid`.
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::KeepPid
|
||||
}
|
||||
|
||||
/// Initializes the plugin and captures the process PID.
|
||||
///
|
||||
/// Retrieves the current process ID and adds it to metadata.
|
||||
/// Marks the plugin as finalized after one run.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `MetaPluginResponse` - Response with PID metadata and finalized state.
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
let pid = process::id().to_string();
|
||||
|
||||
// Use process_metadata_outputs to handle output mapping
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"keep_pid",
|
||||
serde_yaml::Value::String(pid),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
|
||||
// Mark as finalized since this plugin only needs to run once
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of outputs.
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of outputs.
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
/// Returns the default output names for this plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Vector containing "keep_pid".
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec!["keep_pid".to_string()]
|
||||
}
|
||||
|
||||
/// Returns a reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of options.
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of options.
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_keep_pid_plugin() {
|
||||
register_meta_plugin(MetaPluginType::KeepPid, |options, outputs| {
|
||||
Box::new(KeepPidMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
337
src/meta_plugin/magic.rs
Normal file
337
src/meta_plugin/magic.rs
Normal file
@@ -0,0 +1,337 @@
|
||||
use magic::{Cookie, CookieFlags};
|
||||
use std::io;
|
||||
|
||||
use crate::common::PIPESIZE;
|
||||
|
||||
use crate::meta_plugin::{MetaPlugin, MetaPluginType};
|
||||
|
||||
/// Meta plugin that detects MIME type, encoding, and file type of the
/// streamed content via libmagic.
#[derive(Debug)]
pub struct MagicFileMetaPlugin {
    // Accumulated content, capped at `max_buffer_size` bytes.
    buffer: Vec<u8>,
    // Maximum number of bytes to buffer before detection is triggered.
    max_buffer_size: usize,
    // True once detection has run (or initialization failed).
    is_finalized: bool,
    // libmagic handle; None until `initialize` succeeds.
    cookie: Option<Cookie>,
    // Shared plugin state: configured options and output mappings.
    base: crate::meta_plugin::BaseMetaPlugin,
}
|
||||
|
||||
impl MagicFileMetaPlugin {
|
||||
pub fn new(
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> MagicFileMetaPlugin {
|
||||
// Start with default options
|
||||
let mut final_options = std::collections::HashMap::new();
|
||||
final_options.insert("max_buffer_size".to_string(), serde_yaml::Value::Number(PIPESIZE.into()));
|
||||
if let Some(opts) = options {
|
||||
for (key, value) in opts {
|
||||
final_options.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Start with default outputs
|
||||
let mut final_outputs = std::collections::HashMap::new();
|
||||
let default_outputs = vec!["mime_type".to_string(), "mime_encoding".to_string(), "file_type".to_string()];
|
||||
for output_name in default_outputs {
|
||||
final_outputs.insert(output_name.clone(), serde_yaml::Value::String(output_name));
|
||||
}
|
||||
if let Some(outs) = outputs {
|
||||
for (key, value) in outs {
|
||||
final_outputs.insert(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
let max_buffer_size = final_options.get("max_buffer_size")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(PIPESIZE as u64) as usize;
|
||||
|
||||
// Ensure the default max_buffer_size is in the options
|
||||
if !final_options.contains_key("max_buffer_size") {
|
||||
final_options.insert("max_buffer_size".to_string(), serde_yaml::Value::Number(PIPESIZE.into()));
|
||||
}
|
||||
|
||||
let mut base = crate::meta_plugin::BaseMetaPlugin::new();
|
||||
base.outputs = final_outputs;
|
||||
base.options = final_options;
|
||||
|
||||
MagicFileMetaPlugin {
|
||||
buffer: Vec::new(),
|
||||
max_buffer_size,
|
||||
is_finalized: false,
|
||||
cookie: None,
|
||||
base,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fn get_magic_result(&self, flags: CookieFlags) -> io::Result<String> {
|
||||
// Use the existing cookie and just change flags
|
||||
if let Some(cookie) = &self.cookie {
|
||||
cookie.set_flags(flags)
|
||||
.map_err(|e| io::Error::new(io::ErrorKind::Other, format!("Failed to set magic flags: {}", e)))?;
|
||||
|
||||
let result = cookie.buffer(&self.buffer)
|
||||
.map_err(|e| io::Error::new(io::ErrorKind::Other, format!("Failed to analyze buffer: {}", e)))?;
|
||||
|
||||
// Clean up the result - remove extra whitespace and take first part if needed
|
||||
let trimmed = result.trim();
|
||||
|
||||
// For some magic results, we might want just the first part before semicolon or comma
|
||||
let cleaned = if trimmed.contains(';') {
|
||||
trimmed.split(';').next().unwrap_or(trimmed).trim()
|
||||
} else if trimmed.contains(',') && flags.contains(CookieFlags::MIME_TYPE | CookieFlags::MIME_ENCODING) {
|
||||
trimmed.split(',').next().unwrap_or(trimmed).trim()
|
||||
} else {
|
||||
trimmed
|
||||
};
|
||||
|
||||
Ok(cleaned.to_string())
|
||||
} else {
|
||||
Err(io::Error::new(io::ErrorKind::Other, "Magic cookie not initialized"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to process all magic types and collect metadata
|
||||
fn process_magic_types(&self) -> Vec<crate::meta_plugin::MetaData> {
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Define the types to process with their corresponding flags
|
||||
let types_to_process = [
|
||||
("mime_type", CookieFlags::MIME_TYPE),
|
||||
("mime_encoding", CookieFlags::MIME_ENCODING),
|
||||
("file_type", CookieFlags::default()),
|
||||
];
|
||||
|
||||
for (name, flags) in types_to_process.iter() {
|
||||
if let Ok(result) = self.get_magic_result(*flags) {
|
||||
if !result.is_empty() {
|
||||
// Use process_metadata_outputs to handle output mapping
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
name,
|
||||
serde_yaml::Value::String(result),
|
||||
self.base.outputs()
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
metadata
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for MagicFileMetaPlugin {
|
||||
/// Checks if the plugin has been finalized.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `true` if finalized, `false` otherwise.
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
/// Sets the finalized state of the plugin.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `finalized` - The new finalized state.
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
/// Initializes the magic cookie for file type detection.
|
||||
///
|
||||
/// Loads the magic database; finalizes if initialization fails.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with empty metadata; `is_finalized` is `true` on failure.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Logs errors; returns finalized response on cookie or load failure.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut plugin = MagicFileMetaPlugin::new(None, None);
|
||||
/// let response = plugin.initialize();
|
||||
/// ```
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// Initialize the magic cookie once
|
||||
let cookie = match Cookie::open(Default::default()) {
|
||||
Ok(cookie) => cookie,
|
||||
Err(_e) => {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
};
|
||||
if let Err(_e) = cookie.load(&[] as &[&str]) {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
self.cookie = Some(cookie);
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Finalizes the plugin and performs file type detection.
|
||||
///
|
||||
/// Analyzes the accumulated buffer and outputs detected types.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with detection metadata and finalized state set to `true`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut plugin = MagicFileMetaPlugin::new(None, None);
|
||||
/// // ... after updates
|
||||
/// let response = plugin.finalize();
|
||||
/// assert!(response.is_finalized);
|
||||
/// ```
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let metadata = self.process_magic_types();
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Updates the plugin with new data, accumulating for analysis.
|
||||
///
|
||||
/// Buffers data up to `max_buffer_size`; triggers detection when full.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `data` - Content chunk to buffer.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with metadata on buffer full; finalizes then.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut plugin = MagicFileMetaPlugin::new(None, None);
|
||||
/// let response = plugin.update(b"content");
|
||||
/// ```
|
||||
fn update(&mut self, data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Only collect up to max_buffer_size
|
||||
let remaining_capacity = self.max_buffer_size.saturating_sub(self.buffer.len());
|
||||
if remaining_capacity > 0 {
|
||||
let bytes_to_copy = std::cmp::min(data.len(), remaining_capacity);
|
||||
self.buffer.extend_from_slice(&data[..bytes_to_copy]);
|
||||
|
||||
// Check if we've reached our buffer limit and return metadata
|
||||
if self.buffer.len() >= self.max_buffer_size {
|
||||
metadata = self.process_magic_types();
|
||||
|
||||
// Mark as finalized when we've processed enough data
|
||||
self.is_finalized = true;
|
||||
}
|
||||
}
|
||||
|
||||
let is_finalized = !metadata.is_empty();
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the type of this meta plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `MetaPluginType::MagicFile`.
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::MagicFile
|
||||
}
|
||||
|
||||
|
||||
/// Returns a reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of outputs.
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of outputs.
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
/// Returns the default output names for this plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Vector of default output field names.
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec!["mime_type".to_string(), "mime_encoding".to_string(), "file_type".to_string()]
|
||||
}
|
||||
|
||||
|
||||
/// Returns a reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of options.
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of options.
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_magic_file_plugin() {
|
||||
register_meta_plugin(MetaPluginType::MagicFile, |options, outputs| {
|
||||
Box::new(MagicFileMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
443
src/meta_plugin/magic_file.rs
Normal file
443
src/meta_plugin/magic_file.rs
Normal file
@@ -0,0 +1,443 @@
|
||||
#[cfg(feature = "magic")]
|
||||
use magic::{Cookie, CookieFlags};
|
||||
#[cfg(not(feature = "magic"))]
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
use log::debug;
|
||||
use std::io::{self, Write};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::meta_plugin::{
|
||||
BaseMetaPlugin, MetaData, MetaPlugin, MetaPluginResponse, MetaPluginType,
|
||||
process_metadata_outputs,
|
||||
};
|
||||
|
||||
/// libmagic-backed file type detector, available when the "magic" feature
/// is enabled.
#[cfg(feature = "magic")]
#[derive(Debug)]
pub struct MagicFileMetaPluginImpl {
    // Accumulated content, capped at `max_buffer_size` bytes.
    buffer: Vec<u8>,
    // Maximum number of bytes to buffer before detection is triggered.
    max_buffer_size: usize,
    // True once detection has run (or initialization failed).
    is_finalized: bool,
    // libmagic handle; None until `initialize` succeeds.
    cookie: Option<Cookie>,
    // Shared plugin state: configured options and output mappings.
    base: BaseMetaPlugin,
}
|
||||
|
||||
#[cfg(feature = "magic")]
impl MagicFileMetaPluginImpl {
    /// Builds a new libmagic-backed detector.
    ///
    /// `options` may override `max_buffer_size` (defaults to `PIPESIZE`);
    /// `outputs` may remap the emitted metadata names.
    pub fn new(
        options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
        outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
    ) -> MagicFileMetaPluginImpl {
        let mut base = BaseMetaPlugin::new();

        // Default metadata names emitted by this plugin.
        base.initialize_plugin(&["mime_type", "mime_encoding", "file_type"], &options, &outputs);

        // Cap for buffered bytes; non-numeric overrides fall back to PIPESIZE.
        let cap = base
            .options
            .get("max_buffer_size")
            .and_then(|v| v.as_u64())
            .unwrap_or(crate::common::PIPESIZE as u64) as usize;

        MagicFileMetaPluginImpl {
            buffer: Vec::new(),
            max_buffer_size: cap,
            is_finalized: false,
            cookie: None,
            base,
        }
    }

    /// Queries libmagic over the buffered bytes with the given `flags`,
    /// returning the trimmed description.
    ///
    /// # Errors
    ///
    /// Fails when the cookie is missing, the flags cannot be applied, or the
    /// buffer cannot be analyzed.
    fn get_magic_result(&self, flags: CookieFlags) -> io::Result<String> {
        let cookie = self
            .cookie
            .as_ref()
            .ok_or_else(|| io::Error::other("Magic cookie not initialized"))?;

        cookie
            .set_flags(flags)
            .map_err(|e| io::Error::other(format!("Failed to set magic flags: {}", e)))?;

        let raw = cookie
            .buffer(&self.buffer)
            .map_err(|e| io::Error::other(format!("Failed to analyze buffer: {}", e)))?;

        // Normalize away surrounding whitespace.
        Ok(raw.trim().to_string())
    }

    /// Runs all three detections (MIME type, MIME encoding, file type) and
    /// collects the non-empty results as mapped metadata entries.
    fn process_magic_types(&self) -> Vec<MetaData> {
        // Each detection is the same query under different libmagic flags.
        let probes = [
            ("mime_type", CookieFlags::MIME_TYPE),
            ("mime_encoding", CookieFlags::MIME_ENCODING),
            ("file_type", CookieFlags::empty()),
        ];

        let mut collected = Vec::new();
        for (name, flags) in probes {
            let Ok(value) = self.get_magic_result(flags) else {
                continue;
            };
            if value.is_empty() {
                continue;
            }
            // The shared helper applies the configured output mapping.
            if let Some(entry) =
                process_metadata_outputs(name, serde_yaml::Value::String(value), self.base.outputs())
            {
                collected.push(entry);
            }
        }

        collected
    }
}
|
||||
|
||||
#[cfg(feature = "magic")]
impl MetaPlugin for MagicFileMetaPluginImpl {
    /// Reports whether detection has already run.
    fn is_finalized(&self) -> bool {
        self.is_finalized
    }

    /// Overrides the finalized flag.
    fn set_finalized(&mut self, finalized: bool) {
        self.is_finalized = finalized;
    }

    /// Opens a libmagic cookie and loads the default database; any failure
    /// finalizes the plugin so it is skipped for the rest of the stream.
    fn initialize(&mut self) -> MetaPluginResponse {
        let failed = MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: true,
        };

        let cookie = match Cookie::open(CookieFlags::default()) {
            Ok(c) => c,
            Err(e) => {
                debug!("META: MagicFile plugin: failed to create cookie: {}", e);
                return failed;
            }
        };

        // An empty path list loads the system default magic database.
        if let Err(e) = cookie.load(&[] as &[&Path]) {
            debug!(
                "META: MagicFile plugin: failed to load magic database: {}",
                e
            );
            return failed;
        }

        self.cookie = Some(cookie);

        MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Buffers incoming bytes up to `max_buffer_size`; once the cap is hit,
    /// detection runs immediately and the plugin finalizes itself.
    fn update(&mut self, data: &[u8]) -> MetaPluginResponse {
        if self.is_finalized {
            return MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        let room = self.max_buffer_size.saturating_sub(self.buffer.len());
        if room > 0 {
            let take = data.len().min(room);
            self.buffer.extend_from_slice(&data[..take]);

            if self.buffer.len() >= self.max_buffer_size {
                // Cap reached: analyze now and stop consuming data.
                let metadata = self.process_magic_types();
                self.is_finalized = true;
                return MetaPluginResponse {
                    metadata,
                    is_finalized: true,
                };
            }
        }

        MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Runs detection on whatever was buffered, if it has not run yet.
    fn finalize(&mut self) -> MetaPluginResponse {
        if self.is_finalized {
            return MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        let metadata = self.process_magic_types();
        self.is_finalized = true;

        MetaPluginResponse {
            metadata,
            is_finalized: true,
        }
    }

    fn meta_type(&self) -> MetaPluginType {
        MetaPluginType::MagicFile
    }

    fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs()
    }

    fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs_mut()
    }

    fn default_outputs(&self) -> Vec<String> {
        vec![
            "mime_type".to_string(),
            "mime_encoding".to_string(),
            "file_type".to_string(),
        ]
    }

    fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options()
    }

    fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options_mut()
    }
}
|
||||
|
||||
/// Fallback detector used when the "magic" feature is disabled: shells out
/// to the external `file` command instead of linking libmagic.
#[cfg(not(feature = "magic"))]
#[derive(Debug)]
pub struct FallbackMagicFileMetaPlugin {
    // Accumulated content, capped at `max_buffer_size` bytes.
    buffer: Vec<u8>,
    // Maximum number of bytes to buffer before detection is triggered.
    max_buffer_size: usize,
    // True once detection has run.
    is_finalized: bool,
    // Shared plugin state: configured options and output mappings.
    base: BaseMetaPlugin,
}
|
||||
|
||||
#[cfg(not(feature = "magic"))]
|
||||
impl FallbackMagicFileMetaPlugin {
|
||||
pub fn new(
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> FallbackMagicFileMetaPlugin {
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
|
||||
// Set default outputs
|
||||
let default_outputs = &["mime_type", "mime_encoding", "file_type"];
|
||||
base.initialize_plugin(default_outputs, &options, &outputs);
|
||||
|
||||
// Get max_buffer_size from options, default to PIPESIZE
|
||||
let max_buffer_size = base
|
||||
.options
|
||||
.get("max_buffer_size")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(crate::common::PIPESIZE as u64) as usize;
|
||||
|
||||
FallbackMagicFileMetaPlugin {
|
||||
buffer: Vec::new(),
|
||||
max_buffer_size,
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
|
||||
fn run_file_command(&self, buffer: &[u8]) -> io::Result<String> {
|
||||
let mut temp_file = tempfile::NamedTempFile::new()?;
|
||||
temp_file.as_ref().write_all(buffer)?;
|
||||
|
||||
let output = Command::new("file")
|
||||
.arg("-b")
|
||||
.arg("-m")
|
||||
.arg("all")
|
||||
.arg(temp_file.path())
|
||||
.output()
|
||||
.map_err(|e| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
format!("Failed to run file command: {}", e),
|
||||
)
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(io::Error::new(io::ErrorKind::Other, "File command failed"));
|
||||
}
|
||||
|
||||
let result = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
fn process_file_output(&self, result: &str) -> Vec<MetaData> {
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Parse the file command output
|
||||
// file -m all output format is typically: type; charset=encoding
|
||||
let parts: Vec<&str> = result.split(';').map(|s| s.trim()).collect();
|
||||
let file_type = parts.first().cloned().unwrap_or(result);
|
||||
let mime_encoding = parts
|
||||
.get(1)
|
||||
.and_then(|s| s.strip_prefix("charset="))
|
||||
.cloned()
|
||||
.unwrap_or("");
|
||||
|
||||
// For mime_type, try to infer from file type or use a heuristic
|
||||
let mime_type = if file_type.starts_with("text") {
|
||||
"text/plain"
|
||||
} else if file_type.contains("ASCII") || file_type.contains("UTF-8") {
|
||||
"text/plain"
|
||||
} else if file_type.contains("empty") {
|
||||
"application/octet-stream"
|
||||
} else {
|
||||
"application/octet-stream" // default
|
||||
};
|
||||
|
||||
let outputs_to_process = [
|
||||
("mime_type", mime_type),
|
||||
("mime_encoding", mime_encoding),
|
||||
("file_type", file_type),
|
||||
];
|
||||
|
||||
for (name, value) in outputs_to_process.iter() {
|
||||
if let Some(meta_data) = process_metadata_outputs(
|
||||
name,
|
||||
serde_yaml::Value::String(value.to_string()),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
|
||||
metadata
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "magic"))]
impl MetaPlugin for FallbackMagicFileMetaPlugin {
    /// Returns whether the plugin has finished processing.
    fn is_finalized(&self) -> bool {
        self.is_finalized
    }

    /// Sets the finalized state.
    fn set_finalized(&mut self, finalized: bool) {
        self.is_finalized = finalized;
    }

    /// Initializes the plugin; the fallback needs no setup.
    fn initialize(&mut self) -> MetaPluginResponse {
        // No initialization needed for fallback
        MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Buffers incoming data up to `max_buffer_size`; once the buffer is
    /// full, runs the external `file` command and finalizes early.
    ///
    /// Bytes beyond `max_buffer_size` are silently dropped. On a `file`
    /// command error the plugin finalizes with empty metadata.
    fn update(&mut self, data: &[u8]) -> MetaPluginResponse {
        if self.is_finalized {
            return MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        let remaining_capacity = self.max_buffer_size.saturating_sub(self.buffer.len());
        if remaining_capacity > 0 {
            let bytes_to_copy = std::cmp::min(data.len(), remaining_capacity);
            self.buffer.extend_from_slice(&data[..bytes_to_copy]);

            // Buffer is full: detect now instead of waiting for finalize().
            if self.buffer.len() >= self.max_buffer_size {
                if let Ok(result) = self.run_file_command(&self.buffer) {
                    let metadata = self.process_file_output(&result);
                    self.is_finalized = true;
                    return MetaPluginResponse {
                        metadata,
                        is_finalized: true,
                    };
                } else {
                    // On error, finalize with empty metadata
                    self.is_finalized = true;
                    return MetaPluginResponse {
                        metadata: Vec::new(),
                        is_finalized: true,
                    };
                }
            }
        }

        MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Runs the `file` command over whatever was buffered (if anything) and
    /// finalizes. Idempotent: a second call returns empty metadata.
    fn finalize(&mut self) -> MetaPluginResponse {
        if self.is_finalized {
            return MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        let metadata = if !self.buffer.is_empty() {
            if let Ok(result) = self.run_file_command(&self.buffer) {
                self.process_file_output(&result)
            } else {
                // Best-effort: detection failure yields no metadata.
                Vec::new()
            }
        } else {
            Vec::new()
        };

        self.is_finalized = true;

        MetaPluginResponse {
            metadata,
            is_finalized: true,
        }
    }

    /// Returns `MetaPluginType::MagicFile`.
    fn meta_type(&self) -> MetaPluginType {
        MetaPluginType::MagicFile
    }

    /// Returns the output mappings (delegates to `base`).
    fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs()
    }

    /// Returns the output mappings mutably (delegates to `base`).
    fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs_mut()
    }

    /// Default outputs: mime_type, mime_encoding, file_type.
    fn default_outputs(&self) -> Vec<String> {
        vec![
            "mime_type".to_string(),
            "mime_encoding".to_string(),
            "file_type".to_string(),
        ]
    }

    /// Returns the options (delegates to `base`).
    fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options()
    }

    /// Returns the options mutably (delegates to `base`).
    fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options_mut()
    }
}
|
||||
|
||||
#[cfg(feature = "magic")]
|
||||
pub use MagicFileMetaPluginImpl as MagicFileMetaPlugin;
|
||||
|
||||
#[cfg(not(feature = "magic"))]
|
||||
pub use FallbackMagicFileMetaPlugin as MagicFileMetaPlugin;
|
||||
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
/// Registers the magic-file plugin constructor with the global registry.
///
/// Runs at program startup via `ctor`. `MagicFileMetaPlugin` resolves to the
/// libmagic-backed or fallback implementation depending on the `magic`
/// feature.
#[ctor::ctor]
fn register_magic_file_plugin() {
    register_meta_plugin(MetaPluginType::MagicFile, |options, outputs| {
        Box::new(MagicFileMetaPlugin::new(options, outputs))
    });
}
|
||||
511
src/meta_plugin/mod.rs
Normal file
511
src/meta_plugin/mod.rs
Normal file
@@ -0,0 +1,511 @@
|
||||
use log::debug;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Mutex;

pub mod cwd;
pub mod digest;
pub mod env;
pub mod exec;
pub mod hostname;
pub mod keep_pid;
#[cfg(feature = "magic")]
pub mod magic_file;
// The module must also exist without the `magic` feature: magic_file.rs
// provides FallbackMagicFileMetaPlugin under cfg(not(feature = "magic")),
// and the re-export below depends on it.
#[cfg(not(feature = "magic"))]
pub mod magic_file;
pub mod read_rate;
pub mod read_time;
pub mod shell;
pub mod shell_pid;
pub mod text;
pub mod user;
// pub mod text; // Removed duplicate

pub use digest::DigestMetaPlugin;
pub use exec::MetaPluginExec;
#[cfg(feature = "magic")]
pub use magic_file::MagicFileMetaPlugin;
// pub use text::TextMetaPlugin; // Removed duplicate
pub use cwd::CwdMetaPlugin;
pub use env::EnvMetaPlugin;
pub use hostname::HostnameMetaPlugin;
pub use keep_pid::KeepPidMetaPlugin;
pub use read_rate::ReadRateMetaPlugin;
pub use read_time::ReadTimeMetaPlugin;
pub use shell::ShellMetaPlugin;
pub use shell_pid::ShellPidMetaPlugin;
pub use user::UserMetaPlugin;

#[cfg(not(feature = "magic"))]
pub use magic_file::FallbackMagicFileMetaPlugin as MagicFileMetaPlugin;
|
||||
|
||||
/// Constructor signature shared by all registered meta plugins: takes
/// optional options and output mappings and returns a boxed plugin.
type PluginConstructor = fn(
    Option<HashMap<String, serde_yaml::Value>>,
    Option<HashMap<String, serde_yaml::Value>>,
) -> Box<dyn MetaPlugin>;
|
||||
|
||||
/// Represents a single metadata item to be stored.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MetaData {
    /// The name of the metadata field (after any output-name mapping
    /// applied by `process_metadata_outputs`).
    pub name: String,
    /// The value of the metadata field, rendered as a string.
    pub value: String,
}
|
||||
|
||||
/// Response from meta plugin operations (`initialize`/`update`/`finalize`).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct MetaPluginResponse {
    /// The generated metadata items (may be empty).
    pub metadata: Vec<MetaData>,
    /// Indicates if the plugin has finished processing and will produce no
    /// further metadata.
    pub is_finalized: bool,
}
|
||||
|
||||
/// Base implementation for meta plugins to reduce boilerplate.
///
/// Concrete plugins embed this struct and delegate their outputs/options
/// accessors to it.
#[derive(Debug, Clone, Default)]
pub struct BaseMetaPlugin {
    /// Output mappings for metadata (internal name -> configured
    /// name/flag value).
    pub outputs: std::collections::HashMap<String, serde_yaml::Value>,
    /// Configuration options for the plugin.
    pub options: std::collections::HashMap<String, serde_yaml::Value>,
    /// Whether the plugin is finalized.
    pub is_finalized: bool,
}
|
||||
|
||||
impl BaseMetaPlugin {
|
||||
/// Creates a new `BaseMetaPlugin`.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new instance of `BaseMetaPlugin`.
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Returns a reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of outputs.
|
||||
pub fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&self.outputs
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of outputs.
|
||||
pub fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&mut self.outputs
|
||||
}
|
||||
|
||||
/// Returns a reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of options.
|
||||
pub fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&self.options
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of options.
|
||||
pub fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
&mut self.options
|
||||
}
|
||||
|
||||
/// Helper function to initialize plugin options and outputs.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `default_outputs` - Slice of default output names.
|
||||
/// * `options` - Optional user-provided options.
|
||||
/// * `outputs` - Optional user-provided outputs.
|
||||
pub fn initialize_plugin(
|
||||
&mut self,
|
||||
default_outputs: &[&str],
|
||||
options: &Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: &Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) {
|
||||
// Set default outputs
|
||||
for output_name in default_outputs {
|
||||
self.outputs.insert(
|
||||
output_name.to_string(),
|
||||
serde_yaml::Value::String(output_name.to_string()),
|
||||
);
|
||||
}
|
||||
|
||||
// Apply provided options and outputs
|
||||
if let Some(opts) = options {
|
||||
for (key, value) in opts {
|
||||
self.options.insert(key.clone(), value.clone());
|
||||
}
|
||||
}
|
||||
if let Some(outs) = outputs {
|
||||
for (key, value) in outs {
|
||||
self.outputs.insert(key.clone(), value.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for BaseMetaPlugin {
    /// Returns the type of this meta plugin.
    ///
    /// `BaseMetaPlugin` is only meant to be embedded in concrete plugins,
    /// so this placeholder returns `MetaPluginType::Text` purely to satisfy
    /// the trait.
    fn meta_type(&self) -> MetaPluginType {
        // This is a base implementation, so we need to return something
        // This might not be used, but we need to satisfy the trait
        MetaPluginType::Text
    }

    /// Returns a reference to the outputs mapping.
    fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        &self.outputs
    }

    /// Returns a mutable reference to the outputs mapping.
    fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        &mut self.outputs
    }

    /// Returns a reference to the options mapping.
    fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        &self.options
    }

    /// Returns a mutable reference to the options mapping.
    fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        &mut self.options
    }
}
|
||||
|
||||
/// Identifies each built-in meta plugin.
///
/// Via strum, variants serialize/parse in snake_case (case-insensitive), so
/// they double as the plugin names used in configuration.
#[derive(
    Debug,
    Eq,
    PartialEq,
    Clone,
    Hash,
    strum::EnumIter,
    strum::Display,
    strum::EnumString,
    Serialize,
    Deserialize,
)]
#[strum(serialize_all = "snake_case", ascii_case_insensitive)]
pub enum MetaPluginType {
    MagicFile,
    Cwd,
    Text,
    User,
    Shell,
    ShellPid,
    KeepPid,
    Digest,
    ReadTime,
    ReadRate,
    Hostname,
    Exec,
    Env,
}
|
||||
|
||||
/// Central function to handle metadata output with name mapping.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `internal_name` - The internal name of the metadata.
|
||||
/// * `value` - The value to process.
|
||||
/// * `outputs` - The outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// An optional `MetaData` if the output is enabled, `None` if disabled.
|
||||
pub fn process_metadata_outputs(
|
||||
internal_name: &str,
|
||||
value: serde_yaml::Value,
|
||||
outputs: &std::collections::HashMap<String, serde_yaml::Value>,
|
||||
) -> Option<MetaData> {
|
||||
// Check if this output is disabled
|
||||
if let Some(mapping) = outputs.get(internal_name) {
|
||||
// Check for null to disable the output
|
||||
if mapping.is_null() {
|
||||
debug!("META: Skipping disabled output (null): {}", internal_name);
|
||||
return None;
|
||||
}
|
||||
// Check for boolean false to disable the output
|
||||
if let Some(false_val) = mapping.as_bool()
|
||||
&& !false_val
|
||||
{
|
||||
debug!("META: Skipping disabled output: {}", internal_name);
|
||||
return None;
|
||||
}
|
||||
if let Some(custom_name) = mapping.as_str() {
|
||||
// Convert the value to a string representation
|
||||
let value_str = match &value {
|
||||
serde_yaml::Value::Null => "null".to_string(),
|
||||
serde_yaml::Value::Bool(b) => b.to_string(),
|
||||
serde_yaml::Value::Number(n) => n.to_string(),
|
||||
serde_yaml::Value::String(s) => s.clone(),
|
||||
serde_yaml::Value::Sequence(_) => {
|
||||
serde_yaml::to_string(&value).unwrap_or_else(|_| "".to_string())
|
||||
}
|
||||
serde_yaml::Value::Mapping(_) => {
|
||||
serde_yaml::to_string(&value).unwrap_or_else(|_| "".to_string())
|
||||
}
|
||||
serde_yaml::Value::Tagged(_) => {
|
||||
serde_yaml::to_string(&value).unwrap_or_else(|_| "".to_string())
|
||||
}
|
||||
};
|
||||
debug!(
|
||||
"META: Processing metadata: internal_name={}, custom_name={}, value={}",
|
||||
internal_name, custom_name, value_str
|
||||
);
|
||||
return Some(MetaData {
|
||||
name: custom_name.to_string(),
|
||||
value: value_str,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the value to a string representation
|
||||
let value_str = match &value {
|
||||
serde_yaml::Value::Null => "null".to_string(),
|
||||
serde_yaml::Value::Bool(b) => b.to_string(),
|
||||
serde_yaml::Value::Number(n) => n.to_string(),
|
||||
serde_yaml::Value::String(s) => s.clone(),
|
||||
serde_yaml::Value::Sequence(_) => {
|
||||
serde_yaml::to_string(&value).unwrap_or_else(|_| "".to_string())
|
||||
}
|
||||
serde_yaml::Value::Mapping(_) => {
|
||||
serde_yaml::to_string(&value).unwrap_or_else(|_| "".to_string())
|
||||
}
|
||||
serde_yaml::Value::Tagged(_) => {
|
||||
serde_yaml::to_string(&value).unwrap_or_else(|_| "".to_string())
|
||||
}
|
||||
};
|
||||
|
||||
// Default: use internal name as output name
|
||||
debug!(
|
||||
"META: Processing metadata: name={}, value={}",
|
||||
internal_name, value_str
|
||||
);
|
||||
Some(MetaData {
|
||||
name: internal_name.to_string(),
|
||||
value: value_str,
|
||||
})
|
||||
}
|
||||
|
||||
/// Interface implemented by all meta plugins.
///
/// Plugins receive the data stream via `update`, may emit metadata
/// incrementally, and emit any remaining metadata in `finalize`. Most
/// methods have sensible defaults so simple plugins only implement
/// `meta_type` plus whatever state they track.
pub trait MetaPlugin
where
    Self: 'static,
{
    /// Returns the type of this meta plugin.
    fn meta_type(&self) -> MetaPluginType;

    /// Checks if the plugin is supported on the current system.
    /// Defaults to `true`.
    fn is_supported(&self) -> bool {
        true
    }

    /// Checks if the plugin is internal (built-in). Defaults to `true`.
    fn is_internal(&self) -> bool {
        true
    }

    /// Checks if the plugin is already finalized. Defaults to `false`;
    /// stateful plugins override this.
    fn is_finalized(&self) -> bool {
        false
    }

    /// Sets the finalized state. No-op by default; only plugins that track
    /// finalization override it.
    fn set_finalized(&mut self, _finalized: bool) {}

    /// Updates the meta plugin with new data.
    ///
    /// Default implementation ignores the data and reports not-finalized.
    fn update(&mut self, _data: &[u8]) -> MetaPluginResponse {
        // Default implementation does nothing
        MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Finalizes the plugin.
    ///
    /// Default implementation produces no metadata and reports finalized.
    fn finalize(&mut self) -> MetaPluginResponse {
        // Default implementation does nothing
        MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: true,
        }
    }

    /// Gets program information for display in status: program name and
    /// arguments, or `None` for plugins that run no external program.
    fn program_info(&self) -> Option<(&str, Vec<&str>)> {
        None
    }

    /// Initializes the plugin.
    ///
    /// Default implementation does nothing and reports not-finalized.
    fn initialize(&mut self) -> MetaPluginResponse {
        // Default implementation does nothing
        MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Returns the outputs mapping.
    ///
    /// Default implementation returns a shared, always-empty static map.
    fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        use once_cell::sync::Lazy;
        static EMPTY: Lazy<std::collections::HashMap<String, serde_yaml::Value>> =
            Lazy::new(std::collections::HashMap::new);
        &EMPTY
    }

    /// Returns a mutable reference to the outputs mapping.
    ///
    /// # Panics
    ///
    /// Panics unless the plugin overrides this with real storage.
    fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        panic!("outputs_mut() not implemented for this plugin")
    }

    /// Returns the options mapping.
    ///
    /// Default implementation returns a shared, always-empty static map.
    fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        use once_cell::sync::Lazy;
        static EMPTY: Lazy<std::collections::HashMap<String, serde_yaml::Value>> =
            Lazy::new(std::collections::HashMap::new);
        &EMPTY
    }

    /// Returns a mutable reference to the options mapping.
    ///
    /// # Panics
    ///
    /// Panics unless the plugin overrides this with real storage.
    fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        panic!("options_mut() not implemented for this plugin")
    }

    /// Gets the default output names this plugin can produce.
    ///
    /// Defaults to the plugin's snake_case meta type name (via strum
    /// Display).
    fn default_outputs(&self) -> Vec<String> {
        // Default implementation returns the meta type as a string
        vec![self.meta_type().to_string()]
    }

    /// Downcast support, used for checking concrete finalization state.
    fn as_any_mut(&mut self) -> &mut dyn std::any::Any
    where
        Self: Sized,
    {
        self
    }
}
|
||||
|
||||
/// Global registry for meta plugins.
///
/// Maps each `MetaPluginType` to its constructor; populated at startup by
/// the per-plugin `#[ctor]` registration functions.
static META_PLUGIN_REGISTRY: Lazy<Mutex<HashMap<MetaPluginType, PluginConstructor>>> =
    Lazy::new(|| Mutex::new(HashMap::new()));
|
||||
|
||||
/// Register a meta plugin with the global registry.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `meta_plugin_type` - The type of the meta plugin to register.
|
||||
/// * `constructor` - The constructor function for creating plugin instances.
|
||||
pub fn register_meta_plugin(meta_plugin_type: MetaPluginType, constructor: PluginConstructor) {
|
||||
META_PLUGIN_REGISTRY
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert(meta_plugin_type, constructor);
|
||||
}
|
||||
|
||||
pub fn get_meta_plugin(
|
||||
meta_plugin_type: MetaPluginType,
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> Box<dyn MetaPlugin> {
|
||||
let registry = META_PLUGIN_REGISTRY.lock().unwrap();
|
||||
if let Some(constructor) = registry.get(&meta_plugin_type) {
|
||||
return constructor(options, outputs);
|
||||
}
|
||||
|
||||
// Fallback for unknown plugins
|
||||
panic!("Meta plugin {:?} not registered", meta_plugin_type);
|
||||
}
|
||||
@@ -1,151 +0,0 @@
|
||||
use crate::plugins::ProgramWriter;
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use log::*;
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::io::Write;
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
use crate::meta_plugin::MetaPlugin;
|
||||
|
||||
/// Meta plugin that pipes buffered input to an external program and uses
/// the program's output as the metadata value.
#[derive(Clone, Debug)]
pub struct MetaPluginProgram {
    /// Resolved path of the program (or the bare name when not found in
    /// PATH).
    pub program: String,
    /// Arguments passed to the program.
    pub args: Vec<String>,
    /// Whether the program was found on PATH at construction time.
    pub supported: bool,
    /// Metadata field name this plugin produces.
    pub meta_name: String,
    /// If true, only the first whitespace-separated token of the program's
    /// output is used.
    pub split_whitespace: bool,
    // Data accumulated by update(); fed to the program's stdin in
    // finalize().
    buffer: Vec<u8>,
}
|
||||
|
||||
impl MetaPluginProgram {
|
||||
pub fn new(program: &str, args: Vec<&str>, meta_name: String, split_whitespace: bool) -> MetaPluginProgram {
|
||||
let program_path = get_program_path(program);
|
||||
let supported = program_path.is_ok();
|
||||
|
||||
MetaPluginProgram {
|
||||
program: program_path.unwrap_or(program.to_string()),
|
||||
args: args.iter().map(|s| s.to_string()).collect(),
|
||||
supported,
|
||||
meta_name,
|
||||
split_whitespace,
|
||||
buffer: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for MetaPluginProgram {
    /// Supported only when the program was found on PATH at construction.
    fn is_supported(&self) -> bool {
        self.supported
    }

    /// External-program plugins are not built-in.
    fn is_internal(&self) -> bool {
        false
    }

    /// Spawns the program with piped stdin/stdout and returns a writer
    /// that streams into the child's stdin.
    ///
    /// # Errors
    ///
    /// Returns an error if the child process cannot be spawned.
    fn create(&self) -> Result<Box<dyn Write>> {
        debug!("META: Writing using {:?}", *self);

        let program = self.program.clone();
        let args = self.args.clone();

        debug!("META: Executing command: {:?} {:?}", program, args);

        let mut process = Command::new(program.clone())
            .args(args.clone())
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()
            .context(anyhow!(
                "Problem spawning child process: {:?} {:?}",
                program,
                args
            ))?;

        Ok(Box::new(ProgramWriter {
            stdin: process.stdin.take().unwrap(),
        }))
    }

    /// Runs the program, feeding the accumulated buffer to its stdin, and
    /// returns the trimmed stdout. When `split_whitespace` is set, only the
    /// first whitespace-separated token is returned.
    ///
    /// # Errors
    ///
    /// Returns an error if spawning, writing, or waiting fails, or if the
    /// program exits unsuccessfully (stderr is included in the message).
    fn finalize(&mut self) -> io::Result<String> {
        let program = self.program.clone();
        let args = self.args.clone();

        debug!("META: Executing command for finalize: {:?} {:?}", program, args);

        let mut process = Command::new(program)
            .args(args)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
            .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("Failed to spawn process: {}", e)))?;

        let stdin = process.stdin.as_mut().unwrap();
        stdin.write_all(&self.buffer)
            .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("Failed to write to stdin: {}", e)))?;

        let output = process.wait_with_output()
            .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("Failed to wait for process: {}", e)))?;

        if output.status.success() {
            let stdout = String::from_utf8_lossy(&output.stdout);
            let trimmed_result = stdout.trim();

            // For certain programs, we only want the first part before whitespace
            if self.split_whitespace {
                let parts: Vec<&str> = trimmed_result.split_whitespace().collect();
                if !parts.is_empty() {
                    Ok(parts[0].to_string())
                } else {
                    Ok(trimmed_result.to_string())
                }
            } else {
                Ok(trimmed_result.to_string())
            }
        } else {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Err(io::Error::new(
                io::ErrorKind::Other,
                format!("Command failed: {}", stderr.trim()),
            ))
        }
    }

    /// Accumulates data for later use by `finalize`.
    fn update(&mut self, data: &[u8]) {
        self.buffer.extend_from_slice(data);
    }

    /// The metadata field name this plugin produces.
    fn meta_name(&mut self) -> String {
        self.meta_name.clone()
    }

    /// Program and args for status display, or `None` when unsupported.
    fn program_info(&self) -> Option<(&str, Vec<&str>)> {
        if self.supported {
            Some((&self.program, self.args.iter().map(|s| s.as_str()).collect()))
        } else {
            None
        }
    }

}
|
||||
|
||||
fn get_program_path(program: &str) -> Result<String> {
|
||||
debug!("META: Looking for executable: {}", program);
|
||||
if let Ok(path) = env::var("PATH") {
|
||||
for p in path.split(':') {
|
||||
let p_str = format!("{}/{}", p, program);
|
||||
let stat = fs::metadata(p_str.clone());
|
||||
if let Ok(stat) = stat {
|
||||
let md = stat;
|
||||
let permissions = md.permissions();
|
||||
if md.is_file() && permissions.mode() & 0o111 != 0 {
|
||||
return Ok(p_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(anyhow!("Unable to find binary {} in PATH", program))
|
||||
}
|
||||
236
src/meta_plugin/read_rate.rs
Normal file
236
src/meta_plugin/read_rate.rs
Normal file
@@ -0,0 +1,236 @@
|
||||
use std::time::Instant;
|
||||
|
||||
use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginType};
|
||||
|
||||
/// Meta plugin that calculates the read rate (KB/s) of input data.
///
/// Tracks bytes read and elapsed time, then computes the rate in
/// `finalize()`. Outputs the rate via configured mappings.
#[derive(Debug, Clone, Default)]
pub struct ReadRateMetaPlugin {
    // Start time of reading; set on the first update() call.
    start_time: Option<Instant>,
    // Total bytes accumulated across update() calls.
    bytes_read: u64,
    // Whether processing is complete.
    is_finalized: bool,
    // Base plugin holding outputs and options.
    base: BaseMetaPlugin,
}
|
||||
|
||||
impl ReadRateMetaPlugin {
|
||||
/// Creates a new `ReadRateMetaPlugin` instance.
|
||||
///
|
||||
/// Initializes with default options and outputs, merging provided ones.
|
||||
/// Starts tracking from zero bytes and no start time.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_options` - Optional configuration options (merged with defaults; unused specifics here).
|
||||
/// * `outputs` - Optional output mappings (merged with default "read_rate").
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new, un-finalized `ReadRateMetaPlugin` instance.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let plugin = ReadRateMetaPlugin::new(None, None);
|
||||
/// assert!(!plugin.is_finalized());
|
||||
/// ```
|
||||
pub fn new(
|
||||
_options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> ReadRateMetaPlugin {
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
|
||||
// Set default outputs
|
||||
let default_outputs = &["read_rate"];
|
||||
base.initialize_plugin(default_outputs, &_options, &outputs);
|
||||
|
||||
ReadRateMetaPlugin {
|
||||
start_time: None,
|
||||
bytes_read: 0,
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ReadRateMetaPlugin {
    /// Returns whether the plugin has finished processing.
    fn is_finalized(&self) -> bool {
        self.is_finalized
    }

    /// Sets the finalized state of the plugin.
    fn set_finalized(&mut self, finalized: bool) {
        self.is_finalized = finalized;
    }

    /// Finalizes the plugin, calculating the read rate in KB/s from bytes
    /// read and elapsed time, emitted via the configured output mappings.
    ///
    /// Idempotent: a second call returns empty metadata. If update() was
    /// never called (no start time) no metadata is produced; a zero
    /// duration yields "N/A".
    fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
        // If already finalized, don't process again
        if self.is_finalized {
            return crate::meta_plugin::MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        let mut metadata = Vec::new();

        if let Some(start_time) = self.start_time {
            let duration = start_time.elapsed();
            let rate = if duration.as_secs_f64() > 0.0 {
                format!(
                    "{:.2} KB/s",
                    (self.bytes_read as f64 / 1024.0) / duration.as_secs_f64()
                )
            } else {
                "N/A".to_string()
            };

            // Use process_metadata_outputs to handle output mapping
            if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
                "read_rate",
                serde_yaml::Value::String(rate),
                self.base.outputs(),
            ) {
                metadata.push(meta_data);
            }
        }

        // Mark as finalized
        self.is_finalized = true;

        crate::meta_plugin::MetaPluginResponse {
            metadata,
            is_finalized: true,
        }
    }

    /// Updates the plugin with new data, accumulating the byte count.
    ///
    /// Starts the timer on the first call. After finalization, data is
    /// ignored.
    fn update(&mut self, data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
        // If already finalized, don't process more data
        if self.is_finalized {
            return crate::meta_plugin::MetaPluginResponse {
                metadata: Vec::new(),
                is_finalized: true,
            };
        }

        if self.start_time.is_none() {
            self.start_time = Some(Instant::now());
        }
        self.bytes_read += data.len() as u64;
        crate::meta_plugin::MetaPluginResponse {
            metadata: Vec::new(),
            is_finalized: false,
        }
    }

    /// Returns `MetaPluginType::ReadRate`.
    fn meta_type(&self) -> MetaPluginType {
        MetaPluginType::ReadRate
    }

    /// Returns the outputs mapping (delegates to `base`).
    fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs()
    }

    /// Returns the outputs mapping mutably (delegates to `base`).
    fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.outputs_mut()
    }

    /// Default outputs: "read_rate".
    fn default_outputs(&self) -> Vec<String> {
        vec!["read_rate".to_string()]
    }

    /// Returns the options mapping (delegates to `base`).
    fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options()
    }

    /// Returns the options mapping mutably (delegates to `base`).
    fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
        self.base.options_mut()
    }
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_read_rate_plugin() {
|
||||
register_meta_plugin(MetaPluginType::ReadRate, |options, outputs| {
|
||||
Box::new(ReadRateMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
124
src/meta_plugin/read_time.rs
Normal file
124
src/meta_plugin/read_time.rs
Normal file
@@ -0,0 +1,124 @@
|
||||
use std::time::Instant;

use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginType};

/// Meta plugin that measures total wall-clock read duration.
///
/// The timer starts on the first `update` call; the elapsed time is
/// reported under the "read_time" output key when `finalize` runs.
#[derive(Debug, Clone, Default)]
pub struct ReadTimeMetaPlugin {
    // None until any data has been seen; set on the first update.
    start_time: Option<Instant>,
    // True once finalize has produced its metadata (one-shot).
    is_finalized: bool,
    // Shared options/outputs bookkeeping.
    base: BaseMetaPlugin,
}
|
||||
|
||||
impl ReadTimeMetaPlugin {
|
||||
pub fn new(
|
||||
_options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> ReadTimeMetaPlugin {
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
|
||||
// Set default outputs
|
||||
let default_outputs = &["read_time"];
|
||||
base.initialize_plugin(default_outputs, &_options, &outputs);
|
||||
|
||||
ReadTimeMetaPlugin {
|
||||
start_time: None,
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ReadTimeMetaPlugin {
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
if let Some(start_time) = self.start_time {
|
||||
let duration = start_time.elapsed();
|
||||
let duration_str = format!("{:.3} seconds", duration.as_secs_f64());
|
||||
|
||||
// Use process_metadata_outputs to handle output mapping
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"read_time",
|
||||
serde_yaml::Value::String(duration_str),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
if self.start_time.is_none() {
|
||||
self.start_time = Some(Instant::now());
|
||||
}
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::ReadTime
|
||||
}
|
||||
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec!["read_time".to_string()]
|
||||
}
|
||||
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_read_time_plugin() {
|
||||
register_meta_plugin(MetaPluginType::ReadTime, |options, outputs| {
|
||||
Box::new(ReadTimeMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
238
src/meta_plugin/shell.rs
Normal file
238
src/meta_plugin/shell.rs
Normal file
@@ -0,0 +1,238 @@
|
||||
use std::env;
|
||||
|
||||
use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginType};
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
/// Meta plugin for capturing shell environment information.
|
||||
///
|
||||
/// This plugin retrieves the current shell from the SHELL environment variable
|
||||
/// and provides it as metadata. It runs once during initialization and does not
|
||||
/// process input data.
|
||||
pub struct ShellMetaPlugin {
|
||||
is_finalized: bool,
|
||||
base: BaseMetaPlugin,
|
||||
}
|
||||
|
||||
impl ShellMetaPlugin {
|
||||
/// Creates a new ShellMetaPlugin instance.
|
||||
///
|
||||
/// Initializes with default outputs and options, overridden by provided values.
|
||||
/// Defaults to "shell" as the output key.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_options` - Optional configuration options (unused currently).
|
||||
/// * `outputs` - Optional output mappings to override defaults.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `ShellMetaPlugin` - A new instance with processed options and outputs.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let plugin = ShellMetaPlugin::new(None, None);
|
||||
/// ```
|
||||
pub fn new(
|
||||
_options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> ShellMetaPlugin {
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
|
||||
// Set default outputs
|
||||
let default_outputs = &["shell"];
|
||||
base.initialize_plugin(default_outputs, &_options, &outputs);
|
||||
|
||||
ShellMetaPlugin {
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ShellMetaPlugin {
|
||||
/// Checks if the plugin has been finalized.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `bool` - True if finalized, false otherwise.
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
/// Sets the finalized state of the plugin.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `finalized` - The new finalized state.
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
/// Finalizes the plugin without processing data.
|
||||
///
|
||||
/// For this plugin, finalization is handled in `initialize`, so this returns empty metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `MetaPluginResponse` - Response with no metadata and finalized state.
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Updates the plugin with data (not used for shell).
|
||||
///
|
||||
/// Shell plugin doesn't process data streams; returns empty response unless not finalized.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_data` - Byte slice of input data (ignored).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `MetaPluginResponse` - Empty metadata response.
|
||||
fn update(&mut self, _data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the type of this meta plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `MetaPluginType::Shell` - The shell plugin type.
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::Shell
|
||||
}
|
||||
|
||||
/// Initializes the plugin and extracts shell metadata.
|
||||
///
|
||||
/// Retrieves the SHELL environment variable and adds it to metadata.
|
||||
/// Marks the plugin as finalized after one run.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `MetaPluginResponse` - Response with shell metadata and finalized state.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut plugin = ShellMetaPlugin::new(None, None);
|
||||
/// let response = plugin.initialize();
|
||||
/// assert!(response.is_finalized);
|
||||
/// ```
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
let shell = match env::var("SHELL") {
|
||||
Ok(shell) => shell,
|
||||
Err(_) => "unknown".to_string(),
|
||||
};
|
||||
|
||||
// Use process_metadata_outputs to handle output mapping
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"shell",
|
||||
serde_yaml::Value::String(shell),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
|
||||
// Mark as finalized since this plugin only needs to run once
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the plugin's outputs.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `&HashMap<String, serde_yaml::Value>` - The outputs map.
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the plugin's outputs.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `&mut HashMap<String, serde_yaml::Value>` - Mutable outputs map.
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
/// Returns the default output names for this plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Vector containing "shell".
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec!["shell".to_string()]
|
||||
}
|
||||
|
||||
/// Returns a reference to the plugin's options.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `&HashMap<String, serde_yaml::Value>` - The options map.
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the plugin's options.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `&mut HashMap<String, serde_yaml::Value>` - Mutable options map.
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
/// Registers the shell meta plugin with the global registry.
|
||||
///
|
||||
/// This constructor function is called at module load time using ctor crate.
|
||||
/// It creates the plugin with provided options and outputs.
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_shell_plugin() {
|
||||
register_meta_plugin(MetaPluginType::Shell, |options, outputs| {
|
||||
Box::new(ShellMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
132
src/meta_plugin/shell_pid.rs
Normal file
132
src/meta_plugin/shell_pid.rs
Normal file
@@ -0,0 +1,132 @@
|
||||
use crate::meta_plugin::{BaseMetaPlugin, MetaPlugin, MetaPluginType};
use std::env;
use std::process;

/// Meta plugin reporting the parent shell's process id.
///
/// Reads the PPID environment variable during `initialize`; assumes PPID
/// identifies the invoking shell — TODO confirm against the caller's setup.
#[derive(Debug, Clone, Default)]
pub struct ShellPidMetaPlugin {
    // True once initialize has captured the pid value (one-shot).
    is_finalized: bool,
    // Shared options/outputs bookkeeping.
    base: BaseMetaPlugin,
}
|
||||
|
||||
impl ShellPidMetaPlugin {
|
||||
pub fn new(
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> ShellPidMetaPlugin {
|
||||
let mut base = BaseMetaPlugin::new();
|
||||
|
||||
// Set default outputs
|
||||
let default_outputs = &["shell_pid"];
|
||||
base.initialize_plugin(default_outputs, &options, &outputs);
|
||||
|
||||
ShellPidMetaPlugin {
|
||||
is_finalized: false,
|
||||
base,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ShellPidMetaPlugin {
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::ShellPid
|
||||
}
|
||||
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return crate::meta_plugin::MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
let pid = match env::var("PPID") {
|
||||
Ok(ppid) => ppid,
|
||||
Err(_) => process::id().to_string(),
|
||||
};
|
||||
|
||||
// Use process_metadata_outputs to handle output mapping
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"shell_pid",
|
||||
serde_yaml::Value::String(pid),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
|
||||
// Mark as finalized since this plugin only needs to run once
|
||||
self.is_finalized = true;
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_shell_pid_plugin() {
|
||||
register_meta_plugin(MetaPluginType::ShellPid, |options, outputs| {
|
||||
Box::new(ShellPidMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
@@ -1,448 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use gethostname::gethostname;
|
||||
use local_ip_address::local_ip;
|
||||
use dns_lookup::lookup_addr;
|
||||
use std::io;
|
||||
use std::io::Write;
|
||||
use std::env;
|
||||
use std::process;
|
||||
use uzers::{get_current_uid, get_current_gid, get_current_username, get_current_groupname};
|
||||
|
||||
use crate::common::is_binary;
|
||||
use crate::meta_plugin::MetaPlugin;
|
||||
|
||||
/// Legacy meta plugin reporting the process's current working directory.
#[derive(Debug, Clone, Default)]
pub struct CwdMetaPlugin {
    // Metadata key ("cwd").
    meta_name: String,
}
|
||||
|
||||
/// Legacy meta plugin that samples the head of the stream to decide
/// whether the data is binary.
#[derive(Debug, Clone, Default)]
pub struct BinaryMetaPlugin {
    // Metadata key ("binary").
    meta_name: String,
    // Sampled prefix of the stream, capped at max_buffer_size bytes.
    buffer: Vec<u8>,
    // Sampling cap in bytes (4096 at construction).
    max_buffer_size: usize,
}
|
||||
|
||||
impl BinaryMetaPlugin {
|
||||
pub fn new() -> BinaryMetaPlugin {
|
||||
BinaryMetaPlugin {
|
||||
meta_name: "binary".to_string(),
|
||||
buffer: Vec::new(),
|
||||
max_buffer_size: 4096, // 4KB
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl MetaPlugin for BinaryMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
let is_binary = is_binary(&self.buffer);
|
||||
Ok(if is_binary { "true".to_string() } else { "false".to_string() })
|
||||
}
|
||||
|
||||
fn update(&mut self, data: &[u8]) {
|
||||
// Only collect up to max_buffer_size
|
||||
let remaining_capacity = self.max_buffer_size.saturating_sub(self.buffer.len());
|
||||
if remaining_capacity > 0 {
|
||||
let bytes_to_copy = std::cmp::min(data.len(), remaining_capacity);
|
||||
self.buffer.extend_from_slice(&data[..bytes_to_copy]);
|
||||
}
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl CwdMetaPlugin {
|
||||
pub fn new() -> CwdMetaPlugin {
|
||||
CwdMetaPlugin {
|
||||
meta_name: "cwd".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for CwdMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
match env::current_dir() {
|
||||
Ok(path) => Ok(path.to_string_lossy().to_string()),
|
||||
Err(_) => Ok("unknown".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct UidMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl UidMetaPlugin {
|
||||
pub fn new() -> UidMetaPlugin {
|
||||
UidMetaPlugin {
|
||||
meta_name: "uid".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for UidMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
Ok(get_current_uid().to_string())
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct UserMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl UserMetaPlugin {
|
||||
pub fn new() -> UserMetaPlugin {
|
||||
UserMetaPlugin {
|
||||
meta_name: "user".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for UserMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
match get_current_username() {
|
||||
Some(username) => Ok(username.to_string_lossy().to_string()),
|
||||
None => Ok("unknown".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct GidMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl GidMetaPlugin {
|
||||
pub fn new() -> GidMetaPlugin {
|
||||
GidMetaPlugin {
|
||||
meta_name: "gid".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for GidMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
Ok(get_current_gid().to_string())
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct GroupMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl GroupMetaPlugin {
|
||||
pub fn new() -> GroupMetaPlugin {
|
||||
GroupMetaPlugin {
|
||||
meta_name: "group".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for GroupMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
match get_current_groupname() {
|
||||
Some(groupname) => Ok(groupname.to_string_lossy().to_string()),
|
||||
None => Ok("unknown".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct ShellMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl ShellMetaPlugin {
|
||||
pub fn new() -> ShellMetaPlugin {
|
||||
ShellMetaPlugin {
|
||||
meta_name: "shell".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ShellMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
match env::var("SHELL") {
|
||||
Ok(shell) => Ok(shell),
|
||||
Err(_) => Ok("unknown".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct ShellPidMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl ShellPidMetaPlugin {
|
||||
pub fn new() -> ShellPidMetaPlugin {
|
||||
ShellPidMetaPlugin {
|
||||
meta_name: "shell_pid".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for ShellPidMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
match env::var("PPID") {
|
||||
Ok(ppid) => Ok(ppid),
|
||||
Err(_) => Ok(process::id().to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct KeepPidMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl KeepPidMetaPlugin {
|
||||
pub fn new() -> KeepPidMetaPlugin {
|
||||
KeepPidMetaPlugin {
|
||||
meta_name: "keep_pid".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for KeepPidMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
Ok(process::id().to_string())
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct HostnameMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl HostnameMetaPlugin {
|
||||
pub fn new() -> HostnameMetaPlugin {
|
||||
HostnameMetaPlugin {
|
||||
meta_name: "hostname".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for HostnameMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
match gethostname().into_string() {
|
||||
Ok(hostname) => Ok(hostname),
|
||||
Err(_) => Ok("unknown".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed for hostname
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct FullHostnameMetaPlugin {
|
||||
meta_name: String,
|
||||
}
|
||||
|
||||
impl FullHostnameMetaPlugin {
|
||||
pub fn new() -> FullHostnameMetaPlugin {
|
||||
FullHostnameMetaPlugin {
|
||||
meta_name: "full_hostname".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for FullHostnameMetaPlugin {
|
||||
fn is_internal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn create(&self) -> Result<Box<dyn Write>> {
|
||||
Ok(Box::new(io::sink()))
|
||||
}
|
||||
|
||||
fn finalize(&mut self) -> io::Result<String> {
|
||||
// Try to get the FQDN through reverse DNS lookup
|
||||
match local_ip() {
|
||||
Ok(my_local_ip) => {
|
||||
match lookup_addr(&my_local_ip) {
|
||||
Ok(hostname) => Ok(hostname),
|
||||
Err(_) => {
|
||||
// Fall back to regular hostname if reverse DNS fails
|
||||
match gethostname().into_string() {
|
||||
Ok(hostname) => Ok(hostname),
|
||||
Err(_) => Ok("unknown".to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
// Fall back to regular hostname if we can't get local IP
|
||||
match gethostname().into_string() {
|
||||
Ok(hostname) => Ok(hostname),
|
||||
Err(_) => Ok("unknown".to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, _data: &[u8]) {
|
||||
// No update needed for full hostname
|
||||
}
|
||||
|
||||
fn meta_name(&mut self) -> String {
|
||||
self.meta_name.clone()
|
||||
}
|
||||
}
|
||||
|
||||
802
src/meta_plugin/text.rs
Normal file
802
src/meta_plugin/text.rs
Normal file
@@ -0,0 +1,802 @@
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::common::is_binary::is_binary;
|
||||
use crate::meta_plugin::{MetaPlugin, MetaPluginResponse, MetaPluginType};
|
||||
|
||||
/// Meta plugin that gathers text statistics (word count, line count and
/// line-length aggregates) from streamed data, chunk by chunk.
#[derive(Debug, Clone)]
pub struct TextMetaPlugin {
    // Sample used for the text/binary decision, capped at max_buffer_size.
    // NOTE(review): the Option suggests the buffer is released once the
    // decision is made — confirm against the impl (not fully visible here).
    buffer: Option<Vec<u8>>,
    // Detection sample cap in bytes (from the "text_detect_size" option).
    max_buffer_size: usize,
    // True once finalize has produced its metadata.
    is_finalized: bool,
    // Running word total across chunks.
    word_count: usize,
    // Running line total across chunks.
    line_count: usize,
    // None until the stream has been classified as binary or text.
    is_binary_content: Option<bool>,
    // State for tracking word boundaries across chunks
    in_word: bool,
    // Buffer for handling UTF-8 character boundaries
    utf8_buffer: Vec<u8>,
    // Shared options/outputs bookkeeping.
    base: crate::meta_plugin::BaseMetaPlugin,
    // Options to track specific statistics
    track_word_count: bool,
    track_line_count: bool,
    track_line_lengths: bool,
    // Flags for which line length statistics to output
    output_line_max_len: bool,
    output_line_mean_len: bool,
    output_line_median_len: bool,
    // For tracking line lengths (only allocated when needed, e.g. median)
    line_lengths: Option<Vec<usize>>,
    current_line_length: usize,
    // For incremental calculation of max and mean
    max_line_length: usize,
    total_line_length: usize,
    line_count_for_stats: usize,
}
|
||||
|
||||
impl TextMetaPlugin {
|
||||
pub fn new(
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> TextMetaPlugin {
|
||||
let mut base = crate::meta_plugin::BaseMetaPlugin::new();
|
||||
|
||||
// Initialize with helper function
|
||||
base.initialize_plugin(
|
||||
&[
|
||||
"text",
|
||||
"text_word_count",
|
||||
"text_line_count",
|
||||
"text_line_max_len",
|
||||
"text_line_mean_len",
|
||||
"text_line_median_len",
|
||||
],
|
||||
&options,
|
||||
&outputs,
|
||||
);
|
||||
|
||||
// Set disabled outputs to null based on options
|
||||
let outputs_to_disable = vec![
|
||||
("text_word_count", "text_word_count"),
|
||||
("text_line_count", "text_line_count"),
|
||||
("text_line_max_len", "text_line_max_len"),
|
||||
("text_line_mean_len", "text_line_mean_len"),
|
||||
("text_line_median_len", "text_line_median_len"),
|
||||
];
|
||||
|
||||
for (option_name, output_name) in outputs_to_disable {
|
||||
if let Some(value) = base.options.get(option_name) {
|
||||
// Handle both boolean false and string "false"
|
||||
let should_disable = match value {
|
||||
serde_yaml::Value::Bool(b) => !b,
|
||||
serde_yaml::Value::String(s) => s == "false",
|
||||
_ => false,
|
||||
};
|
||||
if should_disable {
|
||||
base.outputs
|
||||
.insert(output_name.to_string(), serde_yaml::Value::Null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set default options if not provided
|
||||
let default_options = vec![
|
||||
(
|
||||
"text_detect_size",
|
||||
serde_yaml::Value::Number(PIPESIZE.into()),
|
||||
),
|
||||
("text_word_count", serde_yaml::Value::Bool(true)),
|
||||
("text_line_count", serde_yaml::Value::Bool(true)),
|
||||
("text_line_max_len", serde_yaml::Value::Bool(true)),
|
||||
("text_line_mean_len", serde_yaml::Value::Bool(true)),
|
||||
("text_line_median_len", serde_yaml::Value::Bool(false)),
|
||||
];
|
||||
|
||||
for (key, value) in default_options {
|
||||
if !base.options.contains_key(key) {
|
||||
base.options.insert(key.to_string(), value);
|
||||
}
|
||||
}
|
||||
|
||||
// Get text_detect_size (previously max_buffer_size)
|
||||
let max_buffer_size = base
|
||||
.options
|
||||
.get("text_detect_size")
|
||||
.or_else(|| base.options.get("max_buffer_size")) // Handle backward compatibility
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(PIPESIZE as u64) as usize;
|
||||
|
||||
// Get which statistics to track
|
||||
let track_word_count = base
|
||||
.options
|
||||
.get("text_word_count")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(true);
|
||||
let track_line_count = base
|
||||
.options
|
||||
.get("text_line_count")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(true);
|
||||
let track_line_max_len = base
|
||||
.options
|
||||
.get("text_line_max_len")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(true);
|
||||
let track_line_mean_len = base
|
||||
.options
|
||||
.get("text_line_mean_len")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(true);
|
||||
let track_line_median_len = base
|
||||
.options
|
||||
.get("text_line_median_len")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
// Track line lengths if any of the line length options are enabled
|
||||
let track_line_lengths = track_line_max_len || track_line_mean_len || track_line_median_len;
|
||||
|
||||
TextMetaPlugin {
|
||||
buffer: Some(Vec::new()),
|
||||
max_buffer_size,
|
||||
is_finalized: false,
|
||||
word_count: 0,
|
||||
line_count: 0,
|
||||
is_binary_content: None,
|
||||
in_word: false,
|
||||
utf8_buffer: Vec::new(),
|
||||
base,
|
||||
// Add fields for line length tracking
|
||||
track_word_count,
|
||||
track_line_count,
|
||||
track_line_lengths,
|
||||
// Set output flags
|
||||
output_line_max_len: track_line_max_len,
|
||||
output_line_mean_len: track_line_mean_len,
|
||||
output_line_median_len: track_line_median_len,
|
||||
line_lengths: if track_line_lengths {
|
||||
Some(Vec::new())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
current_line_length: 0,
|
||||
// Initialize incremental tracking for max and mean
|
||||
max_line_length: 0,
|
||||
total_line_length: 0,
|
||||
line_count_for_stats: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Count words and lines in a text chunk, handling block boundaries correctly.
|
||||
///
|
||||
/// Processes UTF-8 data, tracks word transitions, and updates line length statistics.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `data` - Byte slice of text content.
|
||||
fn count_text_stats(&mut self, data: &[u8]) {
|
||||
// Count lines (newlines) if needed
|
||||
if self.track_line_count {
|
||||
self.line_count += data.iter().filter(|&&b| b == b'\n').count();
|
||||
}
|
||||
|
||||
// Handle UTF-8 character boundaries by combining with any buffered bytes
|
||||
let combined_data = if !self.utf8_buffer.is_empty() {
|
||||
let mut combined = self.utf8_buffer.clone();
|
||||
combined.extend_from_slice(data);
|
||||
combined
|
||||
} else {
|
||||
data.to_vec()
|
||||
};
|
||||
|
||||
// Clear the UTF-8 buffer
|
||||
self.utf8_buffer.clear();
|
||||
|
||||
// Convert to string, handling potential UTF-8 boundaries
|
||||
let text = match std::str::from_utf8(&combined_data) {
|
||||
Ok(text) => text,
|
||||
Err(e) => {
|
||||
// If we have incomplete UTF-8 at the end, buffer those bytes for next chunk
|
||||
let valid_up_to = e.valid_up_to();
|
||||
if valid_up_to < combined_data.len() {
|
||||
self.utf8_buffer
|
||||
.extend_from_slice(&combined_data[valid_up_to..]);
|
||||
}
|
||||
match std::str::from_utf8(&combined_data[..valid_up_to]) {
|
||||
Ok(text) => text,
|
||||
Err(_) => return, // Can't process this data
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Count words if needed
|
||||
if self.track_word_count {
|
||||
for ch in text.chars() {
|
||||
let is_whitespace = ch.is_whitespace();
|
||||
|
||||
if !self.in_word && !is_whitespace {
|
||||
// Transition from whitespace to word - start of new word
|
||||
self.word_count += 1;
|
||||
self.in_word = true;
|
||||
} else if self.in_word && is_whitespace {
|
||||
// Transition from word to whitespace - end of current word
|
||||
self.in_word = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Track line lengths if needed
|
||||
if self.track_line_lengths {
|
||||
for ch in text.chars() {
|
||||
if ch == '\n' {
|
||||
// Update max line length
|
||||
if self.current_line_length > self.max_line_length {
|
||||
self.max_line_length = self.current_line_length;
|
||||
}
|
||||
|
||||
// Update total for mean calculation
|
||||
self.total_line_length += self.current_line_length;
|
||||
self.line_count_for_stats += 1;
|
||||
|
||||
// Only store individual lengths if median is needed
|
||||
if let Some(ref mut lengths) = self.line_lengths {
|
||||
lengths.push(self.current_line_length);
|
||||
}
|
||||
|
||||
self.current_line_length = 0;
|
||||
} else {
|
||||
self.current_line_length += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper method to perform binary detection and return appropriate metadata.
|
||||
///
|
||||
/// Uses the is_binary function to check the buffer and sets text-related outputs accordingly.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `buffer` - Data to check for binary content.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `(Vec<MetaData>, bool)` - Metadata updates and whether content is binary.
|
||||
fn perform_binary_detection(
|
||||
&mut self,
|
||||
buffer: &[u8],
|
||||
) -> (Vec<crate::meta_plugin::MetaData>, bool) {
|
||||
let mut metadata = Vec::new();
|
||||
let is_binary_result = is_binary(buffer);
|
||||
self.is_binary_content = Some(is_binary_result);
|
||||
|
||||
// Output text status
|
||||
let text_value = if is_binary_result {
|
||||
"false".to_string()
|
||||
} else {
|
||||
"true".to_string()
|
||||
};
|
||||
|
||||
// Use process_metadata_outputs to handle output mapping
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
"text",
|
||||
serde_yaml::Value::String(text_value),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
|
||||
// If content is binary, set all text-related outputs to None
|
||||
if is_binary_result {
|
||||
let text_outputs = vec![
|
||||
"text_word_count",
|
||||
"text_line_count",
|
||||
"text_line_max_len",
|
||||
"text_line_mean_len",
|
||||
"text_line_median_len",
|
||||
];
|
||||
for output_name in text_outputs {
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
output_name,
|
||||
serde_yaml::Value::Null,
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(metadata, is_binary_result)
|
||||
}
|
||||
|
||||
/// Helper method to process the remaining UTF-8 buffer and finalize text statistics.
|
||||
///
|
||||
/// Calls count_text_stats with empty data to handle any pending UTF-8 bytes.
|
||||
fn process_remaining_utf8_buffer(&mut self) {
|
||||
if !self.utf8_buffer.is_empty() {
|
||||
self.count_text_stats(&[]);
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper method to handle the last line when tracking line lengths.
|
||||
///
|
||||
/// Updates statistics for any unfinished line at EOF.
|
||||
fn handle_last_line_for_length_tracking(&mut self) {
|
||||
if self.track_line_lengths && self.current_line_length > 0 {
|
||||
// Update max line length for the last line
|
||||
if self.current_line_length > self.max_line_length {
|
||||
self.max_line_length = self.current_line_length;
|
||||
}
|
||||
|
||||
// Update total for mean calculation for the last line
|
||||
self.total_line_length += self.current_line_length;
|
||||
self.line_count_for_stats += 1;
|
||||
|
||||
// Only store individual lengths if median is needed
|
||||
if let Some(ref mut lengths) = self.line_lengths {
|
||||
lengths.push(self.current_line_length);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper method to output word count metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Option<MetaData>` - Metadata entry if tracking is enabled.
|
||||
fn output_word_count_metadata(&self) -> Option<crate::meta_plugin::MetaData> {
|
||||
if self.track_word_count {
|
||||
crate::meta_plugin::process_metadata_outputs(
|
||||
"text_word_count",
|
||||
serde_yaml::Value::String(self.word_count.to_string()),
|
||||
self.base.outputs(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper method to output line count metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Option<MetaData>` - Metadata entry if tracking is enabled.
|
||||
fn output_line_count_metadata(&self) -> Option<crate::meta_plugin::MetaData> {
|
||||
if self.track_line_count {
|
||||
crate::meta_plugin::process_metadata_outputs(
|
||||
"text_line_count",
|
||||
serde_yaml::Value::String(self.line_count.to_string()),
|
||||
self.base.outputs(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper method to output max line length metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Option<MetaData>` - Metadata entry if enabled and data exists.
|
||||
fn output_max_line_length_metadata(&self) -> Option<crate::meta_plugin::MetaData> {
|
||||
if self.output_line_max_len && self.line_count_for_stats > 0 {
|
||||
crate::meta_plugin::process_metadata_outputs(
|
||||
"text_line_max_len",
|
||||
serde_yaml::Value::String(self.max_line_length.to_string()),
|
||||
self.base.outputs(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper method to output mean line length metadata.
|
||||
///
|
||||
/// Computes average line length and rounds to nearest integer.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Option<MetaData>` - Metadata entry if enabled and data exists.
|
||||
fn output_mean_line_length_metadata(&self) -> Option<crate::meta_plugin::MetaData> {
|
||||
if self.output_line_mean_len && self.line_count_for_stats > 0 {
|
||||
let mean_len = self.total_line_length as f64 / self.line_count_for_stats as f64;
|
||||
// Round to nearest integer
|
||||
let mean_len_int = mean_len.round() as usize;
|
||||
crate::meta_plugin::process_metadata_outputs(
|
||||
"text_line_mean_len",
|
||||
serde_yaml::Value::String(mean_len_int.to_string()),
|
||||
self.base.outputs(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper method to output median line length metadata.
|
||||
///
|
||||
/// Sorts line lengths and computes median (average of middle two for even count).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Option<MetaData>` - Metadata entry if enabled and data exists.
|
||||
fn output_median_line_length_metadata(&self) -> Option<crate::meta_plugin::MetaData> {
|
||||
if self.output_line_median_len
|
||||
&& let Some(lengths) = &self.line_lengths
|
||||
&& !lengths.is_empty()
|
||||
{
|
||||
let mut sorted_lengths = lengths.clone();
|
||||
sorted_lengths.sort();
|
||||
let median_len = if lengths.len() % 2 == 0 {
|
||||
(sorted_lengths[lengths.len() / 2 - 1] + sorted_lengths[lengths.len() / 2]) as f64
|
||||
/ 2.0
|
||||
} else {
|
||||
sorted_lengths[lengths.len() / 2] as f64
|
||||
};
|
||||
|
||||
return crate::meta_plugin::process_metadata_outputs(
|
||||
"text_line_median_len",
|
||||
serde_yaml::Value::String(median_len.to_string()),
|
||||
self.base.outputs(),
|
||||
);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Helper method to output word and line counts.
|
||||
///
|
||||
/// Finalizes pending data and collects all enabled text statistics metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Vec<MetaData>` - List of metadata entries.
|
||||
fn output_word_line_counts(&mut self) -> Vec<crate::meta_plugin::MetaData> {
|
||||
// Process any remaining data in utf8_buffer
|
||||
self.process_remaining_utf8_buffer();
|
||||
|
||||
// Handle the last line if tracking line lengths
|
||||
self.handle_last_line_for_length_tracking();
|
||||
|
||||
// Collect all metadata outputs
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Add metadata outputs using a more concise approach
|
||||
let outputs_to_check = vec![
|
||||
(self.output_word_count_metadata(), "word count"),
|
||||
(self.output_line_count_metadata(), "line count"),
|
||||
];
|
||||
|
||||
for (output, _) in outputs_to_check {
|
||||
if let Some(meta_data) = output {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
|
||||
// Output line length statistics if tracked
|
||||
if self.track_line_lengths && self.line_count_for_stats > 0 {
|
||||
let line_stats_outputs = vec![
|
||||
(self.output_max_line_length_metadata(), "max line length"),
|
||||
(self.output_mean_line_length_metadata(), "mean line length"),
|
||||
(
|
||||
self.output_median_line_length_metadata(),
|
||||
"median line length",
|
||||
),
|
||||
];
|
||||
|
||||
for (output, _) in line_stats_outputs {
|
||||
if let Some(meta_data) = output {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
metadata
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for TextMetaPlugin {
|
||||
/// Checks if the plugin has been finalized.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `true` if finalized, `false` otherwise.
|
||||
fn is_finalized(&self) -> bool {
|
||||
self.is_finalized
|
||||
}
|
||||
|
||||
/// Sets the finalized state of the plugin.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `finalized` - The new finalized state.
|
||||
fn set_finalized(&mut self, finalized: bool) {
|
||||
self.is_finalized = finalized;
|
||||
}
|
||||
|
||||
/// Updates the plugin with new data chunk.
|
||||
///
|
||||
/// Accumulates data for binary detection (if pending) or text statistics.
|
||||
/// Finalizes early if binary content is detected.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `data` - Byte slice of content chunk.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `MetaPluginResponse` - Current metadata and finalized status.
|
||||
fn update(&mut self, data: &[u8]) -> MetaPluginResponse {
|
||||
// If already finalized, don't process more data
|
||||
if self.is_finalized {
|
||||
return MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
let processed_data = data.to_vec();
|
||||
|
||||
// If we haven't determined if content is binary yet, build buffer and check
|
||||
if self.is_binary_content.is_none() {
|
||||
let should_finalize = if let Some(ref mut buffer) = self.buffer {
|
||||
// Add processed data to our buffer up to max_buffer_size
|
||||
let remaining_capacity = self.max_buffer_size.saturating_sub(buffer.len());
|
||||
let bytes_to_take = std::cmp::min(processed_data.len(), remaining_capacity);
|
||||
buffer.extend_from_slice(&processed_data[..bytes_to_take]);
|
||||
|
||||
// If we have enough data to make a binary determination, do it now
|
||||
let buffer_len = buffer.len();
|
||||
if buffer_len >= std::cmp::min(1024, self.max_buffer_size) {
|
||||
// Clone the buffer data for binary detection to avoid borrowing conflicts
|
||||
let buffer_clone = buffer.clone();
|
||||
let (binary_metadata, is_binary) = self.perform_binary_detection(&buffer_clone);
|
||||
metadata.extend(binary_metadata);
|
||||
self.is_binary_content = Some(is_binary);
|
||||
|
||||
// If it's binary, we're done with this plugin
|
||||
if is_binary {
|
||||
self.buffer = None; // Drop the buffer
|
||||
self.is_finalized = true;
|
||||
return MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
// If it's text, count words and lines for this chunk
|
||||
self.count_text_stats(&processed_data[..bytes_to_take]);
|
||||
|
||||
// If we've reached our buffer limit, drop the buffer to save memory
|
||||
// But don't finalize yet - we need to keep counting words and lines
|
||||
if buffer_len >= self.max_buffer_size {
|
||||
self.buffer = None; // Drop the buffer
|
||||
}
|
||||
false // Never finalize here for text content
|
||||
} else {
|
||||
// Still building up buffer, count words and lines for this chunk
|
||||
self.count_text_stats(&processed_data[..bytes_to_take]);
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if should_finalize {
|
||||
return MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
} else if self.is_binary_content == Some(false) {
|
||||
// We've already determined it's text, just count words and lines
|
||||
self.count_text_stats(&processed_data);
|
||||
}
|
||||
// If is_binary_content == Some(true), we should have already finalized, but just in case:
|
||||
else if self.is_binary_content == Some(true) {
|
||||
self.is_finalized = true;
|
||||
return MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: self.is_finalized,
|
||||
}
|
||||
}
|
||||
|
||||
/// Finalizes the plugin and emits all pending text statistics.
|
||||
///
|
||||
/// Performs binary detection if not done, then outputs enabled statistics.
|
||||
/// Handles head/tail options for content preview (future implementation).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `MetaPluginResponse` - Final metadata and finalized status.
|
||||
fn finalize(&mut self) -> MetaPluginResponse {
|
||||
// If already finalized, don't process again
|
||||
if self.is_finalized {
|
||||
return MetaPluginResponse {
|
||||
metadata: Vec::new(),
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Check if we have head/tail options
|
||||
let head_bytes = self
|
||||
.base
|
||||
.options
|
||||
.get("head_bytes")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|v| v as usize);
|
||||
let head_lines = self
|
||||
.base
|
||||
.options
|
||||
.get("head_lines")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|v| v as usize);
|
||||
let tail_bytes = self
|
||||
.base
|
||||
.options
|
||||
.get("tail_bytes")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|v| v as usize);
|
||||
let tail_lines = self
|
||||
.base
|
||||
.options
|
||||
.get("tail_lines")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|v| v as usize);
|
||||
|
||||
// If we haven't determined binary status yet, do it now with whatever we have
|
||||
if self.is_binary_content.is_none()
|
||||
&& let Some(buffer) = &self.buffer
|
||||
&& !buffer.is_empty()
|
||||
{
|
||||
// Build filter string from individual parameters
|
||||
let mut filter_parts = Vec::new();
|
||||
if let Some(bytes) = head_bytes {
|
||||
filter_parts.push(format!("head_bytes({})", bytes));
|
||||
}
|
||||
if let Some(lines) = head_lines {
|
||||
filter_parts.push(format!("head_lines({})", lines));
|
||||
}
|
||||
if let Some(bytes) = tail_bytes {
|
||||
filter_parts.push(format!("tail_bytes({})", bytes));
|
||||
}
|
||||
if let Some(lines) = tail_lines {
|
||||
filter_parts.push(format!("tail_lines({})", lines));
|
||||
}
|
||||
|
||||
// For now, just use the buffer as-is since filtering isn't implemented
|
||||
let processed_buffer = buffer.clone();
|
||||
|
||||
// Clone the processed buffer data for binary detection
|
||||
let (binary_metadata, is_binary) = self.perform_binary_detection(&processed_buffer);
|
||||
metadata.extend(binary_metadata);
|
||||
self.is_binary_content = Some(is_binary);
|
||||
|
||||
// If it's binary, we're done
|
||||
if is_binary {
|
||||
self.buffer = None; // Drop the buffer
|
||||
self.is_finalized = true;
|
||||
// Set all text-related outputs to None since content is binary
|
||||
// Only include outputs that are enabled in the configuration
|
||||
let text_outputs = vec![
|
||||
("text_word_count", self.track_word_count),
|
||||
("text_line_count", self.track_line_count),
|
||||
("text_line_max_len", self.output_line_max_len),
|
||||
("text_line_mean_len", self.output_line_mean_len),
|
||||
("text_line_median_len", self.output_line_median_len),
|
||||
];
|
||||
|
||||
for (output_name, is_enabled) in text_outputs {
|
||||
if is_enabled
|
||||
&& let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
output_name,
|
||||
serde_yaml::Value::Null,
|
||||
self.base.outputs(),
|
||||
)
|
||||
{
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
return MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// If content is text, output word and line counts
|
||||
if self.is_binary_content == Some(false) {
|
||||
let word_line_metadata = self.output_word_line_counts();
|
||||
metadata.extend(word_line_metadata);
|
||||
}
|
||||
|
||||
// Only include outputs that are enabled in the configuration
|
||||
// Disabled outputs should not be emitted at all (not even as null)
|
||||
// So we don't need to add anything for disabled outputs
|
||||
|
||||
// Drop the buffer since we're done with it
|
||||
self.buffer = None;
|
||||
|
||||
// Mark as finalized
|
||||
self.is_finalized = true;
|
||||
MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the type of this meta plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `MetaPluginType::Text`.
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::Text
|
||||
}
|
||||
|
||||
/// Returns a reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of outputs.
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of outputs.
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
/// Returns the default output names for this plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Vector of default output field names.
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec![
|
||||
"text".to_string(),
|
||||
"text_word_count".to_string(),
|
||||
"text_line_count".to_string(),
|
||||
"text_line_max_len".to_string(),
|
||||
"text_line_mean_len".to_string(),
|
||||
"text_line_median_len".to_string(),
|
||||
]
|
||||
}
|
||||
|
||||
/// Returns a reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of options.
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of options.
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_text_plugin() {
|
||||
register_meta_plugin(MetaPluginType::Text, |options, outputs| {
|
||||
Box::new(TextMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
166
src/meta_plugin/user.rs
Normal file
166
src/meta_plugin/user.rs
Normal file
@@ -0,0 +1,166 @@
|
||||
use crate::meta_plugin::{MetaPlugin, MetaPluginType};
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
/// Meta plugin for capturing current user and group information.
|
||||
///
|
||||
/// This plugin collects user ID, group ID, username, and group name for the process
|
||||
/// running the keep application, providing context about the creator of items.
|
||||
pub struct UserMetaPlugin {
|
||||
base: crate::meta_plugin::BaseMetaPlugin,
|
||||
}
|
||||
|
||||
impl UserMetaPlugin {
|
||||
/// Creates a new `UserMetaPlugin` instance.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `options` - Optional configuration options for the plugin.
|
||||
/// * `outputs` - Optional output mappings for metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new instance of `UserMetaPlugin`.
|
||||
pub fn new(
|
||||
options: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
outputs: Option<std::collections::HashMap<String, serde_yaml::Value>>,
|
||||
) -> UserMetaPlugin {
|
||||
let mut base = crate::meta_plugin::BaseMetaPlugin::new();
|
||||
|
||||
// Initialize with helper function
|
||||
base.initialize_plugin(
|
||||
&["user_uid", "user_gid", "user_name", "user_group"],
|
||||
&options,
|
||||
&outputs,
|
||||
);
|
||||
|
||||
UserMetaPlugin { base }
|
||||
}
|
||||
|
||||
/// Gets the current username.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// An `Option<String>` with the username, or `None` if unavailable.
|
||||
fn get_current_username() -> Option<String> {
|
||||
uzers::get_user_by_uid(uzers::get_current_uid())
|
||||
.map(|user| user.name().to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
/// Gets the current group name.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// An `Option<String>` with the group name, or `None` if unavailable.
|
||||
fn get_current_groupname() -> Option<String> {
|
||||
uzers::get_group_by_gid(uzers::get_current_gid())
|
||||
.map(|group| group.name().to_string_lossy().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaPlugin for UserMetaPlugin {
|
||||
/// Initializes the plugin, capturing user information.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `MetaPluginResponse` with user metadata and `is_finalized` set to `true`.
|
||||
fn initialize(&mut self) -> crate::meta_plugin::MetaPluginResponse {
|
||||
let mut metadata = Vec::new();
|
||||
|
||||
// Get user info
|
||||
let uid = uzers::get_current_uid().to_string();
|
||||
let gid = uzers::get_current_gid().to_string();
|
||||
let username = Self::get_current_username().unwrap_or_else(|| "unknown".to_string());
|
||||
let groupname = Self::get_current_groupname().unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
// Process each output
|
||||
let values = [
|
||||
("user_uid", uid),
|
||||
("user_gid", gid),
|
||||
("user_name", username),
|
||||
("user_group", groupname),
|
||||
];
|
||||
|
||||
for (name, value) in values {
|
||||
if let Some(meta_data) = crate::meta_plugin::process_metadata_outputs(
|
||||
name,
|
||||
serde_yaml::Value::String(value),
|
||||
self.base.outputs(),
|
||||
) {
|
||||
metadata.push(meta_data);
|
||||
}
|
||||
}
|
||||
|
||||
crate::meta_plugin::MetaPluginResponse {
|
||||
metadata,
|
||||
is_finalized: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the type of this meta plugin.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `MetaPluginType::User`.
|
||||
fn meta_type(&self) -> MetaPluginType {
|
||||
MetaPluginType::User
|
||||
}
|
||||
|
||||
/// Returns a reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of outputs.
|
||||
fn outputs(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the outputs mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of outputs.
|
||||
fn outputs_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.outputs_mut()
|
||||
}
|
||||
|
||||
/// Returns the default output names.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of default output names.
|
||||
fn default_outputs(&self) -> Vec<String> {
|
||||
vec![
|
||||
"user_uid".to_string(),
|
||||
"user_gid".to_string(),
|
||||
"user_name".to_string(),
|
||||
"user_group".to_string(),
|
||||
]
|
||||
}
|
||||
|
||||
/// Returns a reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A reference to the `HashMap` of options.
|
||||
fn options(&self) -> &std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the options mapping.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A mutable reference to the `HashMap` of options.
|
||||
fn options_mut(&mut self) -> &mut std::collections::HashMap<String, serde_yaml::Value> {
|
||||
self.base.options_mut()
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::register_meta_plugin;
|
||||
|
||||
// Register the plugin at module initialization time
|
||||
#[ctor::ctor]
|
||||
fn register_user_plugin() {
|
||||
register_meta_plugin(MetaPluginType::User, |options, outputs| {
|
||||
Box::new(UserMetaPlugin::new(options, outputs))
|
||||
});
|
||||
}
|
||||
@@ -1,17 +1,78 @@
|
||||
use crate::Args;
|
||||
use crate::compression_engine::CompressionType;
|
||||
/// Common utilities shared across different modes in the Keep application.
|
||||
///
|
||||
/// This module provides helper functions for formatting, configuration parsing,
|
||||
/// table creation, and environment variable handling used by various CLI modes.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// These utilities are typically used internally by mode implementations:
|
||||
///
|
||||
/// ```
|
||||
/// use crate::modes::common::{format_size, OutputFormat};
|
||||
/// let formatted = format_size(1024, true); // "1.0K"
|
||||
/// let format = OutputFormat::from_str("json")?;
|
||||
/// ```
|
||||
use crate::config;
|
||||
use crate::meta_plugin::MetaPluginType;
|
||||
use clap::Command;
|
||||
use clap::error::ErrorKind;
|
||||
use comfy_table::{ContentArrangement, Table};
|
||||
use log::debug;
|
||||
use prettytable::format::TableFormat;
|
||||
use regex::Regex;
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::io::IsTerminal;
|
||||
use std::str::FromStr;
|
||||
use strum::IntoEnumIterator;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, strum::EnumString, strum::Display, PartialEq)]
|
||||
#[strum(ascii_case_insensitive)]
|
||||
/// Enum representing supported output formats for structured data.
|
||||
///
|
||||
/// Used to determine how to display lists, info, and status information in CLI modes.
|
||||
/// Defaults to Table for human-readable output; JSON/YAML for machine parsing.
|
||||
///
|
||||
/// # Variants
|
||||
///
|
||||
/// * `Table` - Formatted table output (default).
|
||||
/// * `Json` - JSON structured output.
|
||||
/// * `Yaml` - YAML structured output.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use keep::modes::common::OutputFormat;
|
||||
/// assert_eq!(OutputFormat::from_str("json").unwrap(), OutputFormat::Json);
|
||||
/// ```
|
||||
pub enum OutputFormat {
|
||||
Table,
|
||||
Json,
|
||||
Yaml,
|
||||
}
|
||||
|
||||
/// Extracts metadata from KEEP_META_* environment variables.
|
||||
///
|
||||
/// Scans environment for variables prefixed with KEEP_META_ and extracts
|
||||
/// key-value pairs for initial item metadata. Ignores KEEP_META_PLUGINS.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `HashMap<String, String>` - Metadata from environment variables, with keys in uppercase without prefix.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// None; silently ignores non-matching vars and PLUGINS.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// # use std::env;
|
||||
/// # use std::collections::HashMap;
|
||||
/// env::set_var("KEEP_META_COMMAND", "ls -la");
|
||||
/// let meta = get_meta_from_env();
|
||||
/// assert_eq!(meta.get("COMMAND"), Some(&"ls -la".to_string()));
|
||||
/// ```
|
||||
pub fn get_meta_from_env() -> HashMap<String, String> {
|
||||
debug!("COMMON: Getting meta from KEEP_META_*");
|
||||
let re = Regex::new(r"^KEEP_META_(.+)$").unwrap();
|
||||
@@ -29,55 +90,56 @@ pub fn get_meta_from_env() -> HashMap<String, String> {
|
||||
meta_env
|
||||
}
|
||||
|
||||
/// Format a byte count using binary (1024-based) units: Ki, Mi, Gi, ….
///
/// Whole multiples print without a fractional part ("1Ki"); otherwise one
/// decimal place is kept ("1.5Ki"). Values below 1024 print as plain numbers,
/// and zero prints as "0".
///
/// # Arguments
///
/// * `size` - Size in bytes.
///
/// # Returns
///
/// `String` - Formatted size, e.g. "512", "1Ki", "1.5Mi".
pub fn format_size_human_readable(size: u64) -> String {
    const UNITS: &[&str] = &["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei"];
    const THRESHOLD: f64 = 1024.0;

    if size == 0 {
        return "0".to_string();
    }

    // Repeatedly scale down until the value fits under one unit step
    // (or we run out of unit suffixes).
    let mut value = size as f64;
    let mut unit = 0;
    while value >= THRESHOLD && unit < UNITS.len() - 1 {
        value /= THRESHOLD;
        unit += 1;
    }

    if unit == 0 {
        size.to_string()
    } else if value.fract() == 0.0 {
        // Exact multiple: drop the ".0".
        format!("{}{}", value as u64, UNITS[unit])
    } else {
        format!("{:.1}{}", value, UNITS[unit])
    }
}
|
||||
|
||||
/// Formats a file size in bytes to human-readable or raw format.
|
||||
///
|
||||
/// Uses the humansize crate for human-readable output with decimal units (KB, MB, etc.).
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `size` - Size in bytes as u64.
|
||||
/// * `human_readable` - If true, use units like KB, MB; otherwise, raw bytes as string.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `String` - Formatted size string, e.g., "1.0K" or "1024".
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let raw = format_size(1024, false); // "1024"
|
||||
/// let human = format_size(1024, true); // "1.0K"
|
||||
/// ```
|
||||
pub fn format_size(size: u64, human_readable: bool) -> String {
|
||||
match human_readable {
|
||||
true => format_size_human_readable(size),
|
||||
true => humansize::format_size(size, humansize::DECIMAL),
|
||||
false => size.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string_column(s: String, column_width: usize) -> String {
|
||||
if column_width > 0 {
|
||||
match s.char_indices().nth(column_width) {
|
||||
None => s.to_string(),
|
||||
Some((idx, _)) => s[..idx].to_string(),
|
||||
}
|
||||
} else {
|
||||
s.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn size_column(size: u64, human_readable: bool, column_width: usize) -> String {
|
||||
string_column(format_size(size, human_readable), column_width)
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone, strum::EnumIter, strum::Display, strum::EnumString)]
|
||||
#[derive(Debug, Eq, PartialEq, Clone, strum::EnumIter, strum::Display)]
|
||||
#[strum(ascii_case_insensitive)]
|
||||
/// Enum representing column types for table display.
|
||||
///
|
||||
/// Defines standard and meta columns for list/info modes. Supports "meta:<name>" for specific metadata columns.
|
||||
///
|
||||
/// # Variants
|
||||
///
|
||||
/// * `Id` - Item ID column.
|
||||
/// * `Time` - Timestamp column.
|
||||
/// * `Size` - Content size column.
|
||||
/// * `Compression` - Compression type column.
|
||||
/// * `FileSize` - On-disk file size column.
|
||||
/// * `FilePath` - File path column.
|
||||
/// * `Tags` - Tags column.
|
||||
/// * `Meta` - Metadata column (with sub-type via string parsing).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use keep::modes::common::ColumnType;
|
||||
/// assert_eq!(ColumnType::from_str("id").unwrap(), ColumnType::Id);
|
||||
/// assert_eq!(ColumnType::from_str("meta:hostname").unwrap(), ColumnType::Meta);
|
||||
/// ```
|
||||
pub enum ColumnType {
|
||||
Id,
|
||||
Time,
|
||||
@@ -89,111 +151,48 @@ pub enum ColumnType {
|
||||
Meta,
|
||||
}
|
||||
|
||||
impl ColumnType {
|
||||
/// Returns a Result with error message if the string is not a valid ColumnType
|
||||
pub fn from_str(s: &str) -> anyhow::Result<Self> {
|
||||
Ok(Self::try_from(s)?)
|
||||
}
|
||||
}
|
||||
|
||||
// impl TryFrom<&str> for ColumnType is already implemented by strum_macros
|
||||
// so we remove this conflicting implementation
|
||||
|
||||
pub fn get_format_box_chars_no_border_line_separator() -> TableFormat {
|
||||
prettytable::format::FormatBuilder::new()
|
||||
.column_separator('│')
|
||||
.borders('│')
|
||||
.separators(
|
||||
&[prettytable::format::LinePosition::Top],
|
||||
prettytable::format::LineSeparator::new('─', '┬', '┌', '┐'),
|
||||
)
|
||||
.separators(
|
||||
&[prettytable::format::LinePosition::Title],
|
||||
prettytable::format::LineSeparator::new('─', '┼', '├', '┤'),
|
||||
)
|
||||
.separators(
|
||||
&[prettytable::format::LinePosition::Bottom],
|
||||
prettytable::format::LineSeparator::new('─', '┴', '└', '┘'),
|
||||
)
|
||||
.padding(1, 1)
|
||||
.build()
|
||||
}
|
||||
|
||||
pub fn get_digest_type_meta(digest_type: MetaPluginType) -> String {
|
||||
format!("digest_{}", digest_type.to_string().to_lowercase())
|
||||
}
|
||||
|
||||
|
||||
pub fn cmd_args_digest_type(cmd: &mut Command, args: &Args) -> MetaPluginType {
|
||||
let digest_name = args
|
||||
.item
|
||||
.digest
|
||||
.clone()
|
||||
.unwrap_or(MetaPluginType::DigestSha256.to_string());
|
||||
|
||||
let digest_type_opt = MetaPluginType::from_str(&digest_name);
|
||||
if digest_type_opt.is_err() {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
format!("Invalid digest algorithm '{}'. Use 'sha256' or 'md5'", digest_name),
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
|
||||
digest_type_opt.unwrap()
|
||||
}
|
||||
|
||||
pub fn cmd_args_compression_type(cmd: &mut Command, args: &Args) -> CompressionType {
|
||||
let compression_name = args
|
||||
.item
|
||||
.compression
|
||||
.clone()
|
||||
.unwrap_or(CompressionType::LZ4.to_string());
|
||||
|
||||
let compression_type_opt = CompressionType::from_str(&compression_name);
|
||||
if compression_type_opt.is_err() {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
format!("Invalid compression algorithm '{}'. Supported algorithms: lz4, gzip, xz, zstd", compression_name),
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
|
||||
compression_type_opt.unwrap()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum OutputFormat {
|
||||
Table,
|
||||
Json,
|
||||
Yaml,
|
||||
}
|
||||
|
||||
impl FromStr for OutputFormat {
|
||||
impl std::str::FromStr for ColumnType {
|
||||
type Err = anyhow::Error;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"table" => Ok(OutputFormat::Table),
|
||||
"json" => Ok(OutputFormat::Json),
|
||||
"yaml" => Ok(OutputFormat::Yaml),
|
||||
_ => Err(anyhow::anyhow!("Invalid output format. Supported formats: table, json, yaml")),
|
||||
|
||||
fn from_str(s: &str) -> anyhow::Result<Self> {
|
||||
let lower_s = s.to_lowercase();
|
||||
if s.starts_with("meta:") {
|
||||
Ok(ColumnType::Meta)
|
||||
} else {
|
||||
for variant in ColumnType::iter() {
|
||||
if variant.to_string().to_lowercase() == lower_s {
|
||||
return Ok(variant);
|
||||
}
|
||||
}
|
||||
Err(anyhow::anyhow!("Invalid column type: {}", s))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_output_format(args: &Args) -> OutputFormat {
|
||||
args.options.output_format
|
||||
.as_ref()
|
||||
.and_then(|s| OutputFormat::from_str(s).ok())
|
||||
.unwrap_or(OutputFormat::Table)
|
||||
}
|
||||
|
||||
pub fn cmd_args_meta_plugin_types(cmd: &mut Command, args: &Args) -> Vec<MetaPluginType> {
|
||||
/// Extracts configured meta plugin types from settings and command.
|
||||
///
|
||||
/// Handles comma-separated plugin names and validates against registered types.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cmd` - Mutable Clap command for error reporting.
|
||||
/// * `settings` - Application settings with plugin config.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Vec<MetaPluginType>` - List of enabled plugin types.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Exits via Clap error if unknown plugin type specified.
|
||||
pub fn settings_meta_plugin_types(
|
||||
cmd: &mut Command,
|
||||
settings: &config::Settings,
|
||||
) -> Vec<MetaPluginType> {
|
||||
let mut meta_plugin_types = Vec::new();
|
||||
|
||||
// Handle comma-separated values in each meta_plugins argument
|
||||
for meta_plugin_names_str in &args.item.meta_plugins {
|
||||
for meta_plugin_names_str in &settings.meta_plugins_names() {
|
||||
let meta_plugin_names: Vec<&str> = meta_plugin_names_str.split(',').collect();
|
||||
|
||||
for name in meta_plugin_names {
|
||||
@@ -205,8 +204,9 @@ pub fn cmd_args_meta_plugin_types(cmd: &mut Command, args: &Args) -> Vec<MetaPlu
|
||||
// Try to find the MetaPluginType by meta name
|
||||
let mut found = false;
|
||||
for meta_plugin_type in MetaPluginType::iter() {
|
||||
let mut meta_plugin = crate::meta_plugin::get_meta_plugin(meta_plugin_type.clone());
|
||||
if meta_plugin.meta_name() == trimmed_name {
|
||||
let meta_plugin =
|
||||
crate::meta_plugin::get_meta_plugin(meta_plugin_type.clone(), None, None);
|
||||
if meta_plugin.meta_type().to_string() == trimmed_name {
|
||||
meta_plugin_types.push(meta_plugin_type);
|
||||
found = true;
|
||||
break;
|
||||
@@ -225,3 +225,220 @@ pub fn cmd_args_meta_plugin_types(cmd: &mut Command, args: &Args) -> Vec<MetaPlu
|
||||
|
||||
meta_plugin_types
|
||||
}
|
||||
|
||||
/// Determines compression type from settings and command arguments.
|
||||
///
|
||||
/// Validates the compression name and returns the corresponding enum variant.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cmd` - Mutable Clap command for error reporting.
|
||||
/// * `settings` - Application settings.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `CompressionType` - The resolved compression type.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Exits via Clap error if invalid compression specified.
|
||||
pub fn settings_compression_type(
|
||||
cmd: &mut Command,
|
||||
settings: &config::Settings,
|
||||
) -> CompressionType {
|
||||
let compression_name = settings
|
||||
.compression()
|
||||
.unwrap_or(CompressionType::LZ4.to_string());
|
||||
|
||||
let compression_type_opt = CompressionType::from_str(&compression_name);
|
||||
if compression_type_opt.is_err() {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
format!(
|
||||
"Invalid compression algorithm '{}'. Supported algorithms: lz4, gzip, xz, zstd",
|
||||
compression_name
|
||||
),
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
|
||||
compression_type_opt.unwrap()
|
||||
}
|
||||
|
||||
/// Parses output format from settings.
|
||||
///
|
||||
/// Defaults to `Table` if not specified or invalid. Uses case-insensitive string parsing.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `settings` - Application settings with optional output_format field.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `OutputFormat` - Parsed enum variant or Table as default.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let format = settings_output_format(&settings);
|
||||
/// assert_eq!(format, OutputFormat::Json); // If settings.output_format = Some("json")
|
||||
/// ```
|
||||
pub fn settings_output_format(settings: &config::Settings) -> OutputFormat {
|
||||
settings
|
||||
.output_format
|
||||
.as_ref()
|
||||
.and_then(|s| OutputFormat::from_str(s).ok())
|
||||
.unwrap_or(OutputFormat::Table)
|
||||
}
|
||||
|
||||
/// Trims trailing whitespace from each line in a multi-line string.
|
||||
///
|
||||
/// Useful for cleaning up table output before printing. Preserves newlines but removes spaces/tabs at line ends.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `s` - Input string with potential trailing whitespace, e.g., "line1 \nline2 ".
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `String` - Cleaned string with trimmed lines, e.g., "line1\nline2".
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let cleaned = trim_lines_end("line1 \nline2 ");
|
||||
/// assert_eq!(cleaned, "line1\nline2");
|
||||
/// ```
|
||||
pub fn trim_lines_end(s: &str) -> String {
|
||||
s.lines()
|
||||
.map(|line| line.trim_end())
|
||||
.collect::<Vec<&str>>()
|
||||
.join("\n")
|
||||
}
|
||||
|
||||
/// Creates a new table with styling based on terminal detection.
|
||||
///
|
||||
/// Loads appropriate preset (UTF8 or ASCII) if styling is enabled.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `use_styling` - If true, apply visual styling.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Table` - Configured table instance.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let table = create_table(true);
|
||||
/// table.add_row(vec!["Header1", "Header2"]);
|
||||
/// ```
|
||||
pub fn create_table(use_styling: bool) -> Table {
|
||||
let mut table = Table::new();
|
||||
table.set_content_arrangement(ContentArrangement::Dynamic);
|
||||
|
||||
if use_styling {
|
||||
if std::io::stdout().is_terminal() {
|
||||
table
|
||||
.load_preset(comfy_table::presets::UTF8_FULL)
|
||||
.apply_modifier(comfy_table::modifiers::UTF8_SOLID_INNER_BORDERS);
|
||||
} else {
|
||||
table.load_preset(comfy_table::presets::ASCII_FULL);
|
||||
}
|
||||
} else {
|
||||
table.load_preset(comfy_table::presets::NOTHING);
|
||||
}
|
||||
|
||||
if !std::io::stdout().is_terminal() {
|
||||
table.force_no_tty();
|
||||
}
|
||||
table
|
||||
}
|
||||
|
||||
/// Creates a table configured from application table settings.
|
||||
///
|
||||
/// Applies style presets, modifiers, content arrangement, and truncation indicators.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `table_config` - Table configuration from settings.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Table` - Fully configured table.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let config = TableConfig::default();
|
||||
/// let table = create_table_with_config(&config);
|
||||
/// ```
|
||||
pub fn create_table_with_config(table_config: &crate::config::TableConfig) -> Table {
|
||||
let mut table = Table::new();
|
||||
|
||||
// Set content arrangement
|
||||
match table_config.content_arrangement {
|
||||
crate::config::ContentArrangement::Dynamic => {
|
||||
table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic)
|
||||
}
|
||||
crate::config::ContentArrangement::DynamicFullWidth => {
|
||||
table.set_content_arrangement(comfy_table::ContentArrangement::DynamicFullWidth)
|
||||
}
|
||||
crate::config::ContentArrangement::Disabled => {
|
||||
table.set_content_arrangement(comfy_table::ContentArrangement::Disabled)
|
||||
}
|
||||
};
|
||||
|
||||
// Set style preset
|
||||
match &table_config.style {
|
||||
crate::config::TableStyle::Ascii => {
|
||||
table.load_preset(comfy_table::presets::ASCII_FULL);
|
||||
}
|
||||
crate::config::TableStyle::Utf8 => {
|
||||
table.load_preset(comfy_table::presets::UTF8_FULL);
|
||||
}
|
||||
crate::config::TableStyle::Utf8Full => {
|
||||
table.load_preset(comfy_table::presets::UTF8_FULL);
|
||||
}
|
||||
crate::config::TableStyle::Nothing => {
|
||||
table.load_preset(comfy_table::presets::NOTHING);
|
||||
}
|
||||
crate::config::TableStyle::Custom(preset) => {
|
||||
// For custom presets, we'd need to parse the string
|
||||
// This is a placeholder for custom preset handling
|
||||
if preset == "ASCII_FULL" {
|
||||
table.load_preset(comfy_table::presets::ASCII_FULL);
|
||||
} else if preset == "UTF8_FULL" {
|
||||
table.load_preset(comfy_table::presets::UTF8_FULL);
|
||||
} else if preset == "NOTHING" {
|
||||
table.load_preset(comfy_table::presets::NOTHING);
|
||||
}
|
||||
// Add more presets as needed
|
||||
}
|
||||
};
|
||||
|
||||
// Apply modifiers
|
||||
for modifier in &table_config.modifiers {
|
||||
match modifier.as_str() {
|
||||
"UTF8_SOLID_INNER_BORDERS" => {
|
||||
table.apply_modifier(comfy_table::modifiers::UTF8_SOLID_INNER_BORDERS);
|
||||
}
|
||||
"UTF8_ROUND_CORNERS" => {
|
||||
table.apply_modifier(comfy_table::modifiers::UTF8_ROUND_CORNERS);
|
||||
}
|
||||
_ => {} // Ignore unknown modifiers
|
||||
}
|
||||
}
|
||||
|
||||
// Set truncation indicator if specified
|
||||
if !table_config.truncation_indicator.is_empty() {
|
||||
table.set_truncation_indicator(&table_config.truncation_indicator);
|
||||
}
|
||||
|
||||
if !std::io::stdout().is_terminal() {
|
||||
table.force_no_tty();
|
||||
}
|
||||
|
||||
table
|
||||
}
|
||||
|
||||
@@ -1,53 +1,76 @@
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use std::fs;
|
||||
use anyhow::Result;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::db;
|
||||
use crate::config;
|
||||
use crate::services::error::CoreError;
|
||||
use crate::services::item_service::ItemService;
|
||||
use clap::Command;
|
||||
use clap::error::ErrorKind;
|
||||
use log::{debug, warn};
|
||||
use log::warn;
|
||||
use rusqlite::Connection;
|
||||
|
||||
/// Handles the delete mode: removes items by ID from the database and storage.
|
||||
///
|
||||
/// This function processes a list of item IDs, attempting to delete each from
|
||||
/// both the database and the underlying file storage. It skips items that are
|
||||
/// not found and logs warnings for them. Validation of arguments (e.g., ensuring
|
||||
/// IDs are provided and tags are empty) is handled at the clap parsing level.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_cmd` - Clap command for error handling (unused).
|
||||
/// * `_settings` - Global settings (unused).
|
||||
/// * `_config` - Configuration settings (unused).
|
||||
/// * `ids` - List of item IDs to delete.
|
||||
/// * `_tags` - Tags (unused, as delete only supports IDs).
|
||||
/// * `conn` - Database connection.
|
||||
/// * `data_path` - Path to data directory for storage cleanup.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Result<()>` on success, or an error if deletion fails for any item.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an `anyhow::Error` if a deletion operation fails due to database
|
||||
/// or I/O issues (excluding `ItemNotFound`, which is handled gracefully).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// // This would be called from main after parsing args
|
||||
/// mode_delete(&mut cmd, &settings, &config, &mut vec![1, 2], &mut vec![], &mut conn, data_path)?;
|
||||
/// ```
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// None.
|
||||
pub fn mode_delete(
|
||||
cmd: &mut Command,
|
||||
_args: &crate::Args,
|
||||
ids: &mut Vec<i64>,
|
||||
tags: &mut Vec<String>,
|
||||
_cmd: &mut Command,
|
||||
_settings: &config::Settings,
|
||||
_config: &config::Settings,
|
||||
ids: &mut [i64],
|
||||
_tags: &mut [String],
|
||||
conn: &mut Connection,
|
||||
data_path: PathBuf,
|
||||
) -> Result<()> {
|
||||
if ids.is_empty() {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"No ID given, you must supply atleast one ID when using --delete",
|
||||
)
|
||||
.exit();
|
||||
} else if !tags.is_empty() {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"Tags given but not supported, you must supply atleast one ID when using --delete",
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
// Validation is now handled at the argument parsing level
|
||||
// So we can assume ids is not empty and tags is empty
|
||||
|
||||
let item_service = ItemService::new(data_path);
|
||||
|
||||
for item_id in ids.iter() {
|
||||
if let Some(item) = db::get_item(conn, *item_id)? {
|
||||
debug!("MAIN: Found item {:?}", item);
|
||||
db::delete_item(conn, item)?;
|
||||
|
||||
// Validate that item ID is positive to prevent path traversal issues
|
||||
if *item_id <= 0 {
|
||||
return Err(anyhow!("Invalid item ID: {}", item_id));
|
||||
}
|
||||
|
||||
let mut item_path = data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
|
||||
fs::remove_file(&item_path)
|
||||
.context(anyhow!("Unable to remove item file {:?}", item_path))?;
|
||||
} else {
|
||||
match item_service.delete_item(conn, *item_id) {
|
||||
Ok(_) => {}
|
||||
Err(e) => match e {
|
||||
CoreError::ItemNotFound(_) => {
|
||||
warn!("Unable to find item {item_id} in database");
|
||||
}
|
||||
_ => {
|
||||
return Err(anyhow::Error::from(e)
|
||||
.context(format!("Failed to delete item {}", item_id)));
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -1,405 +1,145 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use crate::config;
|
||||
use crate::services::item_service::ItemService;
|
||||
/// Diff mode implementation.
|
||||
///
|
||||
/// This module provides functionality for comparing two items and displaying their
|
||||
/// differences using external diff tools.
|
||||
use anyhow::{Context, Result};
|
||||
use clap::Command;
|
||||
use std::io::Read;
|
||||
use std::os::fd::FromRawFd;
|
||||
use std::str::FromStr;
|
||||
use log::debug;
|
||||
|
||||
fn validate_diff_args(cmd: &mut Command, ids: &Vec<i64>, tags: &Vec<String>) {
|
||||
fn validate_diff_args(
|
||||
_cmd: &mut Command,
|
||||
ids: &Vec<i64>,
|
||||
tags: &Vec<String>,
|
||||
) -> anyhow::Result<()> {
|
||||
if !tags.is_empty() {
|
||||
cmd.error(
|
||||
clap::error::ErrorKind::InvalidValue,
|
||||
"Tags are not supported with --diff. Please provide exactly two IDs.",
|
||||
)
|
||||
.exit();
|
||||
return Err(anyhow::anyhow!(
|
||||
"Tags are not supported with --diff. Please provide exactly two IDs."
|
||||
));
|
||||
}
|
||||
if ids.len() != 2 {
|
||||
cmd.error(
|
||||
clap::error::ErrorKind::InvalidValue,
|
||||
"You must supply exactly two IDs when using --diff.",
|
||||
)
|
||||
.exit();
|
||||
return Err(anyhow::anyhow!(
|
||||
"You must supply exactly two IDs when using --diff."
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Fetches and validates items from the database for diff operation.
|
||||
///
|
||||
/// This function retrieves two items by their IDs from the database using the
|
||||
/// item service, which handles validation, and returns them as a tuple.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Mutable reference to the database connection.
|
||||
/// * `ids` - Vector of item IDs to fetch.
|
||||
/// * `item_service` - Reference to the item service for validation.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(ItemWithMeta, ItemWithMeta)>` - Tuple of items with metadata or error.
|
||||
fn fetch_and_validate_items(
|
||||
conn: &mut rusqlite::Connection,
|
||||
ids: &Vec<i64>,
|
||||
) -> Result<(crate::db::Item, crate::db::Item), anyhow::Error> {
|
||||
// Fetch items, ensuring they exist.
|
||||
let item_a = crate::db::get_item(conn, ids[0])?
|
||||
.ok_or_else(|| anyhow::anyhow!("Unable to find first item (ID: {}) in database", ids[0]))?;
|
||||
let item_b = crate::db::get_item(conn, ids[1])?
|
||||
.ok_or_else(|| anyhow::anyhow!("Unable to find second item (ID: {}) in database", ids[1]))?;
|
||||
ids: &[i64],
|
||||
item_service: &ItemService,
|
||||
) -> Result<(
|
||||
crate::services::types::ItemWithMeta,
|
||||
crate::services::types::ItemWithMeta,
|
||||
)> {
|
||||
// Fetch items using the service, which handles validation
|
||||
let item_a = item_service
|
||||
.get_item(conn, ids[0])
|
||||
.with_context(|| format!("Unable to find first item (ID: {}) in database", ids[0]))?;
|
||||
let item_b = item_service
|
||||
.get_item(conn, ids[1])
|
||||
.with_context(|| format!("Unable to find second item (ID: {}) in database", ids[1]))?;
|
||||
|
||||
log::debug!("MAIN: Found item A {:?}", item_a);
|
||||
log::debug!("MAIN: Found item B {:?}", item_b);
|
||||
|
||||
let item_a_id = item_a.id.ok_or_else(|| anyhow!("Item A missing ID"))?;
|
||||
let item_b_id = item_b.id.ok_or_else(|| anyhow!("Item B missing ID"))?;
|
||||
|
||||
// Validate that item IDs are positive to prevent path traversal issues
|
||||
if item_a_id <= 0 || item_b_id <= 0 {
|
||||
return Err(anyhow::anyhow!("Invalid item ID: {} or {}", item_a_id, item_b_id));
|
||||
}
|
||||
debug!("MAIN: Found item A {:?}", item_a.item);
|
||||
debug!("MAIN: Found item B {:?}", item_b.item);
|
||||
|
||||
Ok((item_a, item_b))
|
||||
}
|
||||
|
||||
fn get_item_tags(conn: &mut rusqlite::Connection, item: &crate::db::Item) -> Result<Vec<String>, anyhow::Error> {
|
||||
let tags: Vec<String> = crate::db::get_item_tags(conn, item)?
|
||||
.into_iter()
|
||||
.map(|x| x.name)
|
||||
.collect();
|
||||
Ok(tags)
|
||||
}
|
||||
|
||||
/// Sets up file paths and compression for diff operation.
|
||||
///
|
||||
/// This function constructs the file paths for the two items and prepares the
|
||||
/// compression engines needed for reading their contents.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_service` - Reference to the item service.
|
||||
/// * `item_a` - First item with metadata.
|
||||
/// * `item_b` - Second item with metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(PathBuf, PathBuf)>` - Tuple of item file paths or error.
|
||||
fn setup_diff_paths_and_compression(
|
||||
data_path: &std::path::PathBuf,
|
||||
item_a: &crate::db::Item,
|
||||
item_b: &crate::db::Item,
|
||||
) -> Result<(std::path::PathBuf, crate::compression_engine::CompressionType, std::path::PathBuf, crate::compression_engine::CompressionType), anyhow::Error> {
|
||||
let item_a_id = item_a.id.ok_or_else(|| anyhow::anyhow!("Item A missing ID"))?;
|
||||
let item_b_id = item_b.id.ok_or_else(|| anyhow::anyhow!("Item B missing ID"))?;
|
||||
item_service: &ItemService,
|
||||
item_a: &crate::services::types::ItemWithMeta,
|
||||
item_b: &crate::services::types::ItemWithMeta,
|
||||
) -> Result<(std::path::PathBuf, std::path::PathBuf)> {
|
||||
let item_a_id = item_a
|
||||
.item
|
||||
.id
|
||||
.ok_or_else(|| anyhow::anyhow!("Item A missing ID"))?;
|
||||
let item_b_id = item_b
|
||||
.item
|
||||
.id
|
||||
.ok_or_else(|| anyhow::anyhow!("Item B missing ID"))?;
|
||||
|
||||
let mut item_path_a = data_path.clone();
|
||||
item_path_a.push(item_a_id.to_string());
|
||||
let compression_type_a = crate::compression_engine::CompressionType::from_str(&item_a.compression)?;
|
||||
log::debug!("MAIN: Item A has compression type {:?}", compression_type_a);
|
||||
// Use the service's data path to construct proper file paths
|
||||
let data_path = item_service.get_data_path();
|
||||
let item_a_path = data_path.join(item_a_id.to_string());
|
||||
let item_b_path = data_path.join(item_b_id.to_string());
|
||||
|
||||
let mut item_path_b = data_path.clone();
|
||||
item_path_b.push(item_b_id.to_string());
|
||||
let compression_type_b = crate::compression_engine::CompressionType::from_str(&item_b.compression)?;
|
||||
log::debug!("MAIN: Item B has compression type {:?}", compression_type_b);
|
||||
|
||||
Ok((item_path_a, compression_type_a, item_path_b, compression_type_b))
|
||||
}
|
||||
|
||||
fn setup_diff_pipes() -> Result<((libc::c_int, libc::c_int), (libc::c_int, libc::c_int)), anyhow::Error> {
|
||||
use nix::unistd::pipe;
|
||||
use nix::Error as NixError;
|
||||
|
||||
// Create pipes for diff's input
|
||||
let (fd_a_read, fd_a_write) =
|
||||
pipe().map_err(|e: NixError| anyhow::anyhow!("Failed to create pipe A: {}", e))?;
|
||||
let (fd_b_read, fd_b_write) =
|
||||
pipe().map_err(|e: NixError| anyhow::anyhow!("Failed to create pipe B: {}", e))?;
|
||||
|
||||
Ok(((fd_a_read, fd_a_write), (fd_b_read, fd_b_write)))
|
||||
}
|
||||
|
||||
fn setup_fd_guards(fd_a_read: libc::c_int, fd_b_read: libc::c_int) -> (FdGuard, FdGuard) {
|
||||
// Wrap file descriptors in RAII guards
|
||||
let fd_a_read_guard = FdGuard::new(fd_a_read);
|
||||
let fd_b_read_guard = FdGuard::new(fd_b_read);
|
||||
(fd_a_read_guard, fd_b_read_guard)
|
||||
}
|
||||
|
||||
fn set_fd_cloexec(fd_a_write: libc::c_int, fd_b_write: libc::c_int) -> Result<(), anyhow::Error> {
|
||||
use nix::fcntl::{fcntl, FcntlArg, FdFlag};
|
||||
|
||||
// Set FD_CLOEXEC on write ends
|
||||
fcntl(
|
||||
fd_a_write,
|
||||
FcntlArg::F_SETFD(FdFlag::FD_CLOEXEC),
|
||||
)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to set FD_CLOEXEC on fd_a_write: {}", e))?;
|
||||
fcntl(
|
||||
fd_b_write,
|
||||
FcntlArg::F_SETFD(FdFlag::FD_CLOEXEC),
|
||||
)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to set FD_CLOEXEC on fd_b_write: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn spawn_diff_process(
|
||||
item_a_id: i64,
|
||||
item_a_tags: Vec<String>,
|
||||
item_b_id: i64,
|
||||
item_b_tags: Vec<String>,
|
||||
fd_a_read: libc::c_int,
|
||||
fd_b_read: libc::c_int,
|
||||
) -> Result<std::process::Child, anyhow::Error> {
|
||||
log::debug!("MAIN: Creating child process for diff");
|
||||
let mut diff_command = std::process::Command::new("diff");
|
||||
diff_command
|
||||
.arg("-u")
|
||||
.arg("--label")
|
||||
.arg(format!(
|
||||
"Keep item A: {} {}",
|
||||
item_a_id,
|
||||
item_a_tags.join(" ")
|
||||
))
|
||||
.arg(format!("/dev/fd/{}", fd_a_read))
|
||||
.arg("--label")
|
||||
.arg(format!(
|
||||
"Keep item B: {} {}",
|
||||
item_b_id,
|
||||
item_b_tags.join(" ")
|
||||
))
|
||||
.arg(format!("/dev/fd/{}", fd_b_read))
|
||||
.stdin(std::process::Stdio::null())
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped());
|
||||
|
||||
let child_process = diff_command
|
||||
.spawn()
|
||||
.map_err(|e| anyhow::anyhow!("Failed to execute diff command: {}", e))?;
|
||||
|
||||
Ok(child_process)
|
||||
}
|
||||
|
||||
// RAII guard for file descriptors to ensure they're closed
|
||||
struct FdGuard {
|
||||
fd: libc::c_int,
|
||||
}
|
||||
|
||||
impl FdGuard {
|
||||
fn new(fd: libc::c_int) -> Self {
|
||||
Self { fd }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for FdGuard {
|
||||
fn drop(&mut self) {
|
||||
let _ = nix::unistd::close(self.fd);
|
||||
}
|
||||
}
|
||||
|
||||
// Create a function to write item data to a pipe
|
||||
fn write_item_to_pipe(
|
||||
item_path: std::path::PathBuf,
|
||||
compression_type: crate::compression_engine::CompressionType,
|
||||
pipe_writer_raw: std::fs::File,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
use std::io::BufWriter;
|
||||
let mut buffered_pipe_writer = BufWriter::new(pipe_writer_raw);
|
||||
let engine =
|
||||
crate::compression_engine::get_compression_engine(compression_type).expect("Unable to get compression engine");
|
||||
log::debug!("THREAD: Sending item to diff");
|
||||
engine
|
||||
.copy(item_path, &mut buffered_pipe_writer)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to copy/compress item: {}", e))?;
|
||||
log::debug!("THREAD: Done sending item to diff");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Function to spawn a writer thread for an item
|
||||
fn spawn_writer_thread(
|
||||
item_path: std::path::PathBuf,
|
||||
compression_type: crate::compression_engine::CompressionType,
|
||||
fd_write: libc::c_int,
|
||||
) -> std::thread::JoinHandle<Result<(), anyhow::Error>> {
|
||||
let pipe_writer_raw = unsafe { std::fs::File::from_raw_fd(fd_write) };
|
||||
std::thread::spawn(move || {
|
||||
write_item_to_pipe(item_path, compression_type, pipe_writer_raw)
|
||||
})
|
||||
}
|
||||
|
||||
fn execute_diff_command(
|
||||
child_process: &mut std::process::Child,
|
||||
) -> Result<(Vec<u8>, Vec<u8>), anyhow::Error> {
|
||||
let mut child_stdout_pipe = child_process
|
||||
.stdout
|
||||
.take()
|
||||
.expect("BUG: Failed to capture diff stdout pipe");
|
||||
let mut child_stderr_pipe = child_process
|
||||
.stderr
|
||||
.take()
|
||||
.expect("BUG: Failed to capture diff stderr pipe");
|
||||
|
||||
log::debug!("MAIN: Creating threads for diff I/O");
|
||||
|
||||
// Thread to read diff's standard output
|
||||
let stdout_reader_thread = std::thread::spawn(move || {
|
||||
let mut output_buffer = Vec::new();
|
||||
log::debug!("STDOUT_READER: Reading diff stdout");
|
||||
// child_stdout_pipe is a ChildStdout, which implements std::io::Read
|
||||
child_stdout_pipe
|
||||
.read_to_end(&mut output_buffer)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to read diff stdout: {}", e))
|
||||
.map(|_| output_buffer) // Return the Vec<u8> on success
|
||||
});
|
||||
|
||||
// Thread to read diff's standard error
|
||||
let stderr_reader_thread = std::thread::spawn(move || {
|
||||
let mut error_buffer = Vec::new();
|
||||
log::debug!("STDERR_READER: Reading diff stderr");
|
||||
child_stderr_pipe
|
||||
.read_to_end(&mut error_buffer)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to read diff stderr: {}", e))
|
||||
.map(|_| error_buffer)
|
||||
});
|
||||
|
||||
// Retrieve the captured output from the reader threads.
|
||||
let stdout_capture_result = stdout_reader_thread
|
||||
.join()
|
||||
.map_err(|panic_payload| {
|
||||
anyhow::anyhow!("Stdout reader thread panicked: {:?}", panic_payload)
|
||||
})?
|
||||
.map_err(|e| anyhow::anyhow!("Failed to read diff stdout: {}", e))?;
|
||||
|
||||
let stderr_capture_result = stderr_reader_thread
|
||||
.join()
|
||||
.map_err(|panic_payload| {
|
||||
anyhow::anyhow!("Stderr reader thread panicked: {:?}", panic_payload)
|
||||
})?
|
||||
.map_err(|e| anyhow::anyhow!("Failed to read diff stderr: {}", e))?;
|
||||
|
||||
Ok((stdout_capture_result, stderr_capture_result))
|
||||
}
|
||||
|
||||
fn handle_diff_output(
|
||||
diff_status: std::process::ExitStatus,
|
||||
stdout_capture_result: Vec<u8>,
|
||||
stderr_capture_result: Vec<u8>,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
// Handle diff's exit status and output
|
||||
match diff_status.code() {
|
||||
Some(0) => {
|
||||
// Exit code 0: No differences
|
||||
log::debug!("MAIN: Diff successful, no differences found.");
|
||||
// Typically, diff -u doesn't print to stdout if no differences.
|
||||
// But if it did, it would be shown here.
|
||||
if !stdout_capture_result.is_empty() {
|
||||
println!("{}", String::from_utf8_lossy(&stdout_capture_result));
|
||||
}
|
||||
}
|
||||
Some(1) => {
|
||||
// Exit code 1: Differences found
|
||||
log::debug!("MAIN: Diff successful, differences found.");
|
||||
println!("{}", String::from_utf8_lossy(&stdout_capture_result));
|
||||
}
|
||||
Some(error_code) => {
|
||||
// Exit code > 1: Error in diff utility
|
||||
eprintln!("Diff command failed with exit code: {}", error_code);
|
||||
if !stdout_capture_result.is_empty() {
|
||||
eprintln!(
|
||||
"Diff stdout before error:\n{}",
|
||||
String::from_utf8_lossy(&stdout_capture_result)
|
||||
);
|
||||
}
|
||||
if !stderr_capture_result.is_empty() {
|
||||
eprintln!(
|
||||
"Diff stderr:\n{}",
|
||||
String::from_utf8_lossy(&stderr_capture_result)
|
||||
);
|
||||
}
|
||||
return Err(anyhow::anyhow!(
|
||||
"Diff command reported an error (exit code {})",
|
||||
error_code
|
||||
));
|
||||
}
|
||||
None => {
|
||||
// Process terminated by a signal
|
||||
eprintln!("Diff command terminated by signal.");
|
||||
if !stderr_capture_result.is_empty() {
|
||||
eprintln!(
|
||||
"Diff stderr before signal termination:\n{}",
|
||||
String::from_utf8_lossy(&stderr_capture_result)
|
||||
);
|
||||
}
|
||||
return Err(anyhow::anyhow!("Diff command terminated by signal"));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
Ok((item_a_path, item_b_path))
|
||||
}
|
||||
|
||||
pub fn mode_diff(
|
||||
cmd: &mut Command,
|
||||
_args: &crate::Args,
|
||||
ids: &mut Vec<i64>,
|
||||
tags: &mut Vec<String>,
|
||||
args: &crate::args::Args,
|
||||
conn: &mut rusqlite::Connection,
|
||||
data_path: std::path::PathBuf,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
validate_diff_args(cmd, ids, tags);
|
||||
let (item_a, item_b) = fetch_and_validate_items(conn, ids)?;
|
||||
|
||||
let item_a_tags = get_item_tags(conn, &item_a)?;
|
||||
let item_b_tags = get_item_tags(conn, &item_b)?;
|
||||
|
||||
let (item_path_a, compression_type_a, item_path_b, compression_type_b) =
|
||||
setup_diff_paths_and_compression(&data_path, &item_a, &item_b)?;
|
||||
|
||||
let ((fd_a_read, fd_a_write), (fd_b_read, fd_b_write)) = setup_diff_pipes()?;
|
||||
let (_fd_a_read_guard, _fd_b_read_guard) = setup_fd_guards(fd_a_read, fd_b_read);
|
||||
set_fd_cloexec(fd_a_write, fd_b_write)?;
|
||||
|
||||
let item_a_id = item_a.id.ok_or_else(|| anyhow::anyhow!("Item A missing ID"))?;
|
||||
let item_b_id = item_b.id.ok_or_else(|| anyhow::anyhow!("Item B missing ID"))?;
|
||||
|
||||
let mut child_process = spawn_diff_process(
|
||||
item_a_id,
|
||||
item_a_tags,
|
||||
item_b_id,
|
||||
item_b_tags,
|
||||
fd_a_read,
|
||||
fd_b_read,
|
||||
)?;
|
||||
|
||||
// Close read ends in parent process - they're now guarded by FdGuard
|
||||
drop(_fd_a_read_guard);
|
||||
drop(_fd_b_read_guard);
|
||||
|
||||
// Spawn writer threads for both items
|
||||
let writer_thread_a =
|
||||
spawn_writer_thread(item_path_a.clone(), compression_type_a.clone(), fd_a_write);
|
||||
|
||||
let writer_thread_b =
|
||||
spawn_writer_thread(item_path_b.clone(), compression_type_b.clone(), fd_b_write);
|
||||
|
||||
// Wait for writer threads to complete (meaning all input has been sent to diff)
|
||||
log::debug!("MAIN: Waiting on writer thread for item A");
|
||||
match writer_thread_a.join() {
|
||||
Ok(Ok(())) => {
|
||||
log::debug!("MAIN: Writer thread for item A completed successfully.");
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
return Err(anyhow::anyhow!("Writer thread for item A failed: {}", e));
|
||||
}
|
||||
Err(panic_payload) => {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Writer thread for item A (ID: {}) panicked: {:?}",
|
||||
ids[0],
|
||||
panic_payload
|
||||
));
|
||||
}
|
||||
) -> anyhow::Result<()> {
|
||||
let ids: Vec<i64> = args
|
||||
.ids_or_tags
|
||||
.iter()
|
||||
.filter_map(|x| {
|
||||
if let crate::args::NumberOrString::Number(n) = x {
|
||||
Some(*n)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
log::debug!("MAIN: Waiting on writer thread for item B");
|
||||
match writer_thread_b.join() {
|
||||
Ok(Ok(())) => {
|
||||
log::debug!("MAIN: Writer thread for item B completed successfully.");
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
return Err(anyhow::anyhow!("Writer thread for item B failed: {}", e));
|
||||
}
|
||||
Err(panic_payload) => {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Writer thread for item B (ID: {}) panicked: {:?}",
|
||||
ids[1],
|
||||
panic_payload
|
||||
));
|
||||
}
|
||||
let tags: Vec<String> = args
|
||||
.ids_or_tags
|
||||
.iter()
|
||||
.filter_map(|x| {
|
||||
if let crate::args::NumberOrString::Str(s) = x {
|
||||
Some(s.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
log::debug!("MAIN: Done waiting on input-writer threads.");
|
||||
validate_diff_args(cmd, &ids, &tags)?;
|
||||
|
||||
// Now that all input has been sent and input pipes will be closed by threads exiting,
|
||||
// wait for the diff child process to terminate.
|
||||
log::debug!("MAIN: Waiting for diff child process to finish...");
|
||||
let diff_status = child_process
|
||||
.wait()
|
||||
.map_err(|e| anyhow::anyhow!("Failed to wait on diff command: {}", e))?;
|
||||
log::debug!(
|
||||
"MAIN: Diff child process finished with status: {}",
|
||||
diff_status
|
||||
);
|
||||
let settings = crate::config::Settings::new(args, crate::config::Settings::default_dir()?)?;
|
||||
|
||||
let (stdout_capture_result, stderr_capture_result) = execute_diff_command(&mut child_process)?;
|
||||
handle_diff_output(diff_status, stdout_capture_result, stderr_capture_result)?;
|
||||
let item_service = crate::services::item_service::ItemService::new(settings.dir.clone());
|
||||
|
||||
let (item_a, item_b) = fetch_and_validate_items(conn, &ids, &item_service)?;
|
||||
|
||||
let (path_a, path_b) = setup_diff_paths_and_compression(&item_service, &item_a, &item_b)?;
|
||||
|
||||
// TODO: Implement actual diff logic here
|
||||
// For now, just print paths or something to make it compile
|
||||
println!("Diff between {:?} and {:?}", path_a, path_b);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
234
src/modes/generate_config.rs
Normal file
234
src/modes/generate_config.rs
Normal file
@@ -0,0 +1,234 @@
|
||||
use crate::meta_plugin::MetaPlugin;
|
||||
use anyhow::Result;
|
||||
use clap::Command;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_yaml;
|
||||
|
||||
/// Mode for generating a default configuration file.
|
||||
///
|
||||
/// This module creates a commented YAML template with default values for settings,
|
||||
/// including list format, server config, compression, and meta plugins.
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
/// Default configuration structure for the generated template.
|
||||
///
|
||||
/// Includes core settings, list formatting, server options, compression, and meta plugins.
|
||||
struct DefaultConfig {
|
||||
dir: Option<String>,
|
||||
list_format: Vec<ColumnConfig>,
|
||||
human_readable: bool,
|
||||
output_format: Option<String>,
|
||||
quiet: bool,
|
||||
force: bool,
|
||||
server: Option<ServerConfig>,
|
||||
compression_plugin: Option<CompressionPluginConfig>,
|
||||
meta_plugins: Option<Vec<MetaPluginConfig>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
/// Configuration for a column in the list format.
|
||||
struct ColumnConfig {
|
||||
name: String,
|
||||
label: Option<String>,
|
||||
#[serde(default)]
|
||||
align: ColumnAlignment,
|
||||
#[serde(default)]
|
||||
max_len: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Default)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
/// Alignment options for table columns.
|
||||
enum ColumnAlignment {
|
||||
#[default]
|
||||
Left,
|
||||
Right,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
/// Server configuration options.
|
||||
struct ServerConfig {
|
||||
address: Option<String>,
|
||||
port: Option<u16>,
|
||||
password_file: Option<String>,
|
||||
password: Option<String>,
|
||||
password_hash: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
/// Configuration for the compression plugin.
|
||||
struct CompressionPluginConfig {
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
/// Configuration for a meta plugin.
|
||||
struct MetaPluginConfig {
|
||||
name: String,
|
||||
#[serde(default)]
|
||||
options: std::collections::HashMap<String, serde_yaml::Value>,
|
||||
#[serde(default)]
|
||||
outputs: std::collections::HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// Generates and prints a default commented YAML configuration template.
|
||||
///
|
||||
/// Creates instances of available meta plugins to populate default options and outputs,
|
||||
/// then serializes the config to YAML with all lines commented for easy editing.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `_cmd` - Unused Clap command reference.
|
||||
/// * `_settings` - Unused settings reference.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Ok(())` on success.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// mode_generate_config(&mut cmd, &settings)?;
|
||||
/// ```
|
||||
pub fn mode_generate_config(_cmd: &mut Command, _settings: &crate::config::Settings) -> Result<()> {
|
||||
// Create instances of each meta plugin to get their default options and outputs
|
||||
let cwd_plugin = crate::meta_plugin::cwd::CwdMetaPlugin::new(None, None);
|
||||
let digest_plugin = crate::meta_plugin::digest::DigestMetaPlugin::new(None, None);
|
||||
let hostname_plugin = crate::meta_plugin::hostname::HostnameMetaPlugin::new(None, None);
|
||||
#[cfg(feature = "magic")]
|
||||
let magic_file_plugin = crate::meta_plugin::magic_file::MagicFileMetaPlugin::new(None, None);
|
||||
let env_plugin = crate::meta_plugin::env::EnvMetaPlugin::new(None, None);
|
||||
|
||||
// Create a default configuration
|
||||
let default_config = DefaultConfig {
|
||||
dir: Some("~/.local/share/keep".to_string()),
|
||||
list_format: vec![
|
||||
ColumnConfig {
|
||||
name: "id".to_string(),
|
||||
label: Some("Item".to_string()),
|
||||
align: ColumnAlignment::Right,
|
||||
max_len: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "time".to_string(),
|
||||
label: Some("Time".to_string()),
|
||||
align: ColumnAlignment::Right,
|
||||
max_len: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "size".to_string(),
|
||||
label: Some("Size".to_string()),
|
||||
align: ColumnAlignment::Right,
|
||||
max_len: None,
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "tags".to_string(),
|
||||
label: Some("Tags".to_string()),
|
||||
align: ColumnAlignment::Left,
|
||||
max_len: Some("40".to_string()),
|
||||
},
|
||||
ColumnConfig {
|
||||
name: "meta:hostname_full".to_string(),
|
||||
label: Some("Hostname".to_string()),
|
||||
align: ColumnAlignment::Left,
|
||||
max_len: Some("28".to_string()),
|
||||
},
|
||||
],
|
||||
human_readable: false,
|
||||
output_format: Some("table".to_string()),
|
||||
quiet: false,
|
||||
force: false,
|
||||
server: Some(ServerConfig {
|
||||
address: Some("127.0.0.1".to_string()),
|
||||
port: Some(8080),
|
||||
password_file: None,
|
||||
password: None,
|
||||
password_hash: None,
|
||||
}),
|
||||
compression_plugin: None,
|
||||
meta_plugins: Some(vec![
|
||||
MetaPluginConfig {
|
||||
name: "cwd".to_string(),
|
||||
options: cwd_plugin.options().clone(),
|
||||
outputs: convert_outputs_to_string_map(cwd_plugin.outputs()),
|
||||
},
|
||||
MetaPluginConfig {
|
||||
name: "digest".to_string(),
|
||||
options: digest_plugin.options().clone(),
|
||||
outputs: convert_outputs_to_string_map(digest_plugin.outputs()),
|
||||
},
|
||||
MetaPluginConfig {
|
||||
name: "hostname".to_string(),
|
||||
options: hostname_plugin.options().clone(),
|
||||
outputs: convert_outputs_to_string_map(hostname_plugin.outputs()),
|
||||
},
|
||||
#[cfg(feature = "magic")]
|
||||
MetaPluginConfig {
|
||||
name: "magic_file".to_string(),
|
||||
options: magic_file_plugin.options().clone(),
|
||||
outputs: convert_outputs_to_string_map(magic_file_plugin.outputs()),
|
||||
},
|
||||
MetaPluginConfig {
|
||||
name: "env".to_string(),
|
||||
options: env_plugin.options().clone(),
|
||||
outputs: convert_outputs_to_string_map(env_plugin.outputs()),
|
||||
},
|
||||
]),
|
||||
};
|
||||
|
||||
// Serialize to YAML and comment out all lines
|
||||
let yaml = serde_yaml::to_string(&default_config)?;
|
||||
|
||||
// Comment out every line
|
||||
let commented_yaml = yaml
|
||||
.lines()
|
||||
.map(|line| {
|
||||
if line.trim().is_empty() {
|
||||
line.to_string()
|
||||
} else {
|
||||
format!("# {}", line)
|
||||
}
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n");
|
||||
|
||||
println!("{}", commented_yaml);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Helper function to convert outputs from serde_yaml::Value to String.
|
||||
///
|
||||
/// Handles null (uses key), strings, and other values by serializing to YAML string.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `outputs` - Reference to the outputs HashMap.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A HashMap with string keys and values.
|
||||
fn convert_outputs_to_string_map(
|
||||
outputs: &std::collections::HashMap<String, serde_yaml::Value>,
|
||||
) -> std::collections::HashMap<String, String> {
|
||||
let mut result = std::collections::HashMap::new();
|
||||
for (key, value) in outputs {
|
||||
match value {
|
||||
serde_yaml::Value::Null => {
|
||||
// For null, use the key as the value
|
||||
result.insert(key.clone(), key.clone());
|
||||
}
|
||||
serde_yaml::Value::String(s) => {
|
||||
result.insert(key.clone(), s.clone());
|
||||
}
|
||||
_ => {
|
||||
// Convert other values to their YAML string representation
|
||||
result.insert(
|
||||
key.clone(),
|
||||
serde_yaml::to_string(value).unwrap_or_default(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
166
src/modes/get.rs
166
src/modes/get.rs
@@ -1,114 +1,112 @@
|
||||
use anyhow::anyhow;
|
||||
use std::io::{Read, Write};
|
||||
use anyhow::{Result, anyhow};
|
||||
use std::io::Write;
|
||||
|
||||
use crate::compression_engine::{CompressionType, get_compression_engine};
|
||||
use crate::common::is_binary;
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::common::is_binary::is_binary;
|
||||
use crate::config;
|
||||
use crate::filter_plugin::FilterChain;
|
||||
use crate::services::item_service::ItemService;
|
||||
use clap::Command;
|
||||
use is_terminal::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Handles the get mode: retrieves and streams item content to stdout, applying filters if specified.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cmd` - Clap command for error handling.
|
||||
/// * `settings` - Global settings, including force output flag.
|
||||
/// * `ids` - List of item IDs (at most one).
|
||||
/// * `tags` - List of tags to match (mutually exclusive with IDs).
|
||||
/// * `conn` - Database connection.
|
||||
/// * `data_path` - Path to data directory.
|
||||
/// * `filter_chain` - Optional pre-parsed filter chain to apply to content.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Result<()>` on success, or an error if item not found or output fails.
|
||||
pub fn mode_get(
|
||||
cmd: &mut Command,
|
||||
args: &crate::Args,
|
||||
ids: &mut Vec<i64>,
|
||||
tags: &mut Vec<String>,
|
||||
settings: &config::Settings,
|
||||
ids: &mut [i64],
|
||||
tags: &mut [String],
|
||||
conn: &mut rusqlite::Connection,
|
||||
data_path: PathBuf,
|
||||
) -> anyhow::Result<()> {
|
||||
filter_chain: Option<FilterChain>,
|
||||
) -> Result<()> {
|
||||
if !ids.is_empty() && !tags.is_empty() {
|
||||
cmd.error(clap::error::ErrorKind::InvalidValue, "Both ID and tags given, you must supply exactly one ID or at least one tag when using --get").exit();
|
||||
cmd.error(
|
||||
clap::error::ErrorKind::InvalidValue,
|
||||
"Both ID and tags given, you must supply either IDs or tags when using --get",
|
||||
)
|
||||
.exit();
|
||||
} else if ids.len() > 1 {
|
||||
cmd.error(clap::error::ErrorKind::InvalidValue, "More than one ID given, you must supply exactly one ID or at least one tag when using --get").exit();
|
||||
cmd.error(
|
||||
clap::error::ErrorKind::InvalidValue,
|
||||
"More than one ID given, you must supply exactly one ID when using --get",
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
// If both are empty, find_item will find the last item
|
||||
|
||||
let mut meta: std::collections::HashMap<String, String> = std::collections::HashMap::new();
|
||||
for item in args.item.meta.iter() {
|
||||
let item = item.clone();
|
||||
meta.insert(item.key, item.value);
|
||||
}
|
||||
let item_service = ItemService::new(data_path.clone());
|
||||
let item_with_meta = item_service
|
||||
.find_item(conn, ids, tags, &std::collections::HashMap::new())
|
||||
.map_err(|e| anyhow!("Unable to find matching item in database: {}", e))?;
|
||||
|
||||
let item_maybe = match tags.is_empty() && meta.is_empty() {
|
||||
true => match ids.iter().next() {
|
||||
Some(item_id) => crate::db::get_item(conn, *item_id)?,
|
||||
None => crate::db::get_item_last(conn)?,
|
||||
},
|
||||
false => crate::db::get_item_matching(conn, tags, &meta)?,
|
||||
};
|
||||
|
||||
if let Some(item) = item_maybe {
|
||||
let item_id = item.id.ok_or_else(|| anyhow!("Item missing ID"))?;
|
||||
// Validate that item ID is positive to prevent path traversal issues
|
||||
if item_id <= 0 {
|
||||
return Err(anyhow!("Invalid item ID: {}", item_id));
|
||||
}
|
||||
|
||||
let mut item_path = data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
let item_id = item_with_meta.item.id.unwrap();
|
||||
|
||||
// Determine if we should detect binary data
|
||||
let mut detect_binary = !args.options.force && is_stdout_tty();
|
||||
let mut detect_binary = !settings.force && std::io::stdout().is_terminal();
|
||||
|
||||
// If we're detecting binary and there's binary metadata, check it
|
||||
if detect_binary {
|
||||
let item_meta = crate::db::get_item_meta(conn, &item)?;
|
||||
let binary_meta = item_meta.into_iter().find(|meta| meta.name == "binary");
|
||||
if let Some(binary_meta) = binary_meta {
|
||||
if binary_meta.value == "false" {
|
||||
// If metadata says it's not binary, don't detect
|
||||
let meta_map = item_with_meta.meta_as_map();
|
||||
if let Some(text_val) = meta_map.get("text") {
|
||||
if text_val == "true" {
|
||||
detect_binary = false;
|
||||
} else if binary_meta.value == "true" {
|
||||
// If metadata says it's binary, error immediately
|
||||
return Err(anyhow!("Refusing to output binary data to TTY, use --force to override"));
|
||||
} else if text_val == "false" {
|
||||
return Err(anyhow!(
|
||||
"Refusing to output binary data to TTY, use --force to override"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let compression_type = CompressionType::from_str(&item.compression)?;
|
||||
let compression_engine = get_compression_engine(compression_type)?;
|
||||
// Get a reader that applies the filters using the pre-parsed filter chain
|
||||
let (mut reader, _, _) = item_service.get_item_content_info_streaming_with_chain(
|
||||
conn,
|
||||
item_id,
|
||||
filter_chain.as_ref(),
|
||||
)?;
|
||||
|
||||
// If we need to detect binary, read first 4KB and check
|
||||
if detect_binary {
|
||||
// Open the file through compression engine to read first 4KB
|
||||
let mut reader = compression_engine.open(item_path.clone())?;
|
||||
let mut buffer = [0u8; 4096];
|
||||
let bytes_read = reader.read(&mut buffer)?;
|
||||
|
||||
// Check if this data is binary
|
||||
if is_binary(&buffer[..bytes_read]) {
|
||||
return Err(anyhow!("Refusing to output binary data to TTY, use --force to override"));
|
||||
// Read only the first 8192 bytes for binary detection
|
||||
let mut sample_buffer = vec![0; PIPESIZE];
|
||||
let bytes_read = reader.read(&mut sample_buffer)?;
|
||||
if is_binary(&sample_buffer[..bytes_read]) {
|
||||
return Err(anyhow!(
|
||||
"Refusing to output binary data to TTY, use --force to override"
|
||||
));
|
||||
}
|
||||
// We need to create a new reader since we consumed some bytes
|
||||
let (new_reader, _, _) = item_service.get_item_content_info_streaming_with_chain(
|
||||
conn,
|
||||
item_id,
|
||||
filter_chain.as_ref(),
|
||||
)?;
|
||||
reader = new_reader;
|
||||
}
|
||||
|
||||
// If not binary, output the data we've read
|
||||
std::io::stdout().write_all(&buffer[..bytes_read])?;
|
||||
|
||||
// Continue reading and outputting the rest of the data
|
||||
// Stream the content to stdout
|
||||
let mut stdout = std::io::stdout();
|
||||
std::io::copy(&mut reader, &mut stdout)?;
|
||||
} else {
|
||||
// No binary detection needed, just output the data
|
||||
compression_engine.cat(item_path.clone())?;
|
||||
let mut buffer = [0; PIPESIZE];
|
||||
loop {
|
||||
let bytes_read = reader.read(&mut buffer)?;
|
||||
if bytes_read == 0 {
|
||||
break;
|
||||
}
|
||||
stdout.write_all(&buffer[..bytes_read])?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!("Unable to find matching item in database"))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_stdout_tty() -> bool {
|
||||
#[cfg(unix)]
|
||||
unsafe {
|
||||
libc::isatty(libc::STDOUT_FILENO) != 0
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
unsafe {
|
||||
let stdout_handle = winapi::um::processenv::GetStdHandle(winapi::um::winbase::STD_OUTPUT_HANDLE);
|
||||
let mut console_mode: winapi::shared::minwindef::DWORD = 0;
|
||||
winapi::um::consoleapi::GetConsoleMode(stdout_handle, &mut console_mode) != 0
|
||||
}
|
||||
|
||||
// Fallback for non-unix platforms or if we can't determine
|
||||
#[cfg(not(any(unix, windows)))]
|
||||
false
|
||||
}
|
||||
|
||||
@@ -1,58 +1,96 @@
|
||||
use crate::db::Item;
|
||||
use crate::modes::common::{format_size, get_output_format, OutputFormat};
|
||||
use anyhow::anyhow;
|
||||
use serde_json;
|
||||
use serde_yaml;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use crate::config;
|
||||
use crate::modes::common::{OutputFormat, format_size};
|
||||
use crate::services::types::ItemWithMeta;
|
||||
use anyhow::{Result, anyhow};
|
||||
use clap::Command;
|
||||
use clap::error::ErrorKind;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::compression_engine::CompressionType;
|
||||
use crate::db::{get_item, get_item_last, get_item_matching};
|
||||
use crate::modes::common::get_format_box_chars_no_border_line_separator;
|
||||
use crate::services::item_service::ItemService;
|
||||
use chrono::prelude::*;
|
||||
use is_terminal::IsTerminal;
|
||||
use prettytable::format;
|
||||
use prettytable::{Attr, Cell, Row, Table};
|
||||
use comfy_table::{Attribute, Cell};
|
||||
|
||||
/// Displays detailed information about an item or the last item if no ID/tags specified.
|
||||
///
|
||||
/// Supports table, JSON, or YAML output formats. Validates input (at most one ID, no mixing IDs/tags).
|
||||
/// Uses ItemService to fetch the item and displays via helpers.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cmd` - Mutable Clap command for error handling and exiting on invalid args.
|
||||
/// * `settings` - Application settings for output formatting and human-readable sizes.
|
||||
/// * `ids` - Mutable vector of item IDs (at most one; cleared if tags used).
|
||||
/// * `tags` - Mutable vector of tags (mutually exclusive with IDs).
|
||||
/// * `conn` - Mutable database connection for querying items.
|
||||
/// * `data_path` - Path to data directory for file metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Ok(())` on success, or `Err(anyhow::Error)` if item not found or DB query fails.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * Clap errors if invalid args (e.g., multiple IDs).
|
||||
/// * Anyhow error if no matching item found.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// mode_info(&mut cmd, &settings, &mut vec![123], &mut vec![], &mut conn, data_path)?;
|
||||
/// ```
|
||||
pub fn mode_info(
|
||||
cmd: &mut Command,
|
||||
args: &crate::Args,
|
||||
ids: &mut Vec<i64>,
|
||||
tags: &mut Vec<String>,
|
||||
settings: &config::Settings,
|
||||
ids: &mut [i64],
|
||||
tags: &mut [String],
|
||||
conn: &mut rusqlite::Connection,
|
||||
data_path: PathBuf,
|
||||
) -> anyhow::Result<()> {
|
||||
) -> Result<()> {
|
||||
// For --info, we can use either IDs or tags, but not both
|
||||
if !ids.is_empty() && !tags.is_empty() {
|
||||
cmd.error(ErrorKind::InvalidValue, "Both ID and tags given, you must supply exactly one ID or atleast one tag when using --info").exit();
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"Both ID and tags given, you must supply either IDs or tags when using --info",
|
||||
)
|
||||
.exit();
|
||||
} else if ids.len() > 1 {
|
||||
cmd.error(ErrorKind::InvalidValue, "More than one ID given, you must supply exactly one ID or atleast one tag when using --info").exit();
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"More than one ID given, you must supply exactly one ID when using --info",
|
||||
)
|
||||
.exit();
|
||||
}
|
||||
// If both are empty, find_item will find the last item
|
||||
|
||||
let item_service = ItemService::new(data_path.clone());
|
||||
// Use empty metadata HashMap
|
||||
let item_with_meta = item_service
|
||||
.find_item(conn, ids, tags, &std::collections::HashMap::new())
|
||||
.map_err(|e| anyhow!("Unable to find matching item in database: {}", e))?;
|
||||
|
||||
show_item(item_with_meta, settings, data_path)
|
||||
}
|
||||
|
||||
let mut meta: std::collections::HashMap<String, String> = std::collections::HashMap::new();
|
||||
for item in args.item.meta.iter() {
|
||||
let item = item.clone();
|
||||
meta.insert(item.key, item.value);
|
||||
}
|
||||
|
||||
let item_maybe = match tags.is_empty() && meta.is_empty() {
|
||||
true => match ids.iter().next() {
|
||||
Some(item_id) => get_item(conn, *item_id)?,
|
||||
None => get_item_last(conn)?,
|
||||
},
|
||||
false => get_item_matching(conn, tags, &meta)?,
|
||||
};
|
||||
|
||||
match item_maybe {
|
||||
Some(item) => show_item(item, args, conn, data_path),
|
||||
None => Err(anyhow!("Unable to find matching item in database")),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct ItemInfo {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
/// Structured representation of item information for JSON/YAML output.
|
||||
///
|
||||
/// This struct serializes item details including ID, timestamp, sizes, compression, tags, and metadata
|
||||
/// for non-table output formats.
|
||||
///
|
||||
/// # Fields
|
||||
///
|
||||
/// * `id` - The unique item ID.
|
||||
/// * `timestamp` - Formatted timestamp string.
|
||||
/// * `path` - Full file path to the item.
|
||||
/// * `stream_size` - Original uncompressed size in bytes (optional).
|
||||
/// * `stream_size_formatted` - Human-readable stream size.
|
||||
/// * `compression` - Compression type used.
|
||||
/// * `file_size` - Compressed file size in bytes (optional).
|
||||
/// * `file_size_formatted` - Human-readable file size.
|
||||
/// * `tags` - List of associated tags.
|
||||
/// * `meta` - Metadata key-value pairs.
|
||||
pub struct ItemInfo {
|
||||
id: i64,
|
||||
timestamp: String,
|
||||
path: String,
|
||||
@@ -65,136 +103,172 @@ struct ItemInfo {
|
||||
meta: std::collections::HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// Displays item information in table format or delegates to structured output.
|
||||
///
|
||||
/// Builds a comfy-table for tabular display or calls structured helper for JSON/YAML.
|
||||
/// Handles file size via metadata and formats tags/meta accordingly.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_with_meta` - Item with associated metadata and tags.
|
||||
/// * `settings` - Application settings for formatting (e.g., human-readable sizes).
|
||||
/// * `data_path` - Path to data directory for calculating compressed file size.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Ok(())` on success, or `Err(anyhow::Error)` if path resolution fails.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * Anyhow error if item path cannot be stringified.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// show_item(item_with_meta, &settings, data_path)?;
|
||||
/// ```
|
||||
fn show_item(
|
||||
item: Item, // Using the provided struct definition
|
||||
args: &crate::Args,
|
||||
conn: &mut rusqlite::Connection,
|
||||
item_with_meta: ItemWithMeta,
|
||||
settings: &config::Settings,
|
||||
data_path: PathBuf,
|
||||
) -> anyhow::Result<()> {
|
||||
let item_id = item.id.unwrap(); // Consider using if let or expect for Option
|
||||
|
||||
let item_tags: Vec<String> = crate::db::get_item_tags(conn, &item)?
|
||||
.into_iter()
|
||||
.map(|x| x.name)
|
||||
.collect();
|
||||
|
||||
let output_format = get_output_format(args);
|
||||
) -> Result<()> {
|
||||
let output_format = crate::modes::common::settings_output_format(settings);
|
||||
|
||||
if output_format != OutputFormat::Table {
|
||||
return show_item_structured(item, args, conn, data_path, output_format);
|
||||
return show_item_structured(item_with_meta, settings, data_path, output_format);
|
||||
}
|
||||
|
||||
let mut table = Table::new();
|
||||
if std::io::stdout().is_terminal() {
|
||||
table.set_format(get_format_box_chars_no_border_line_separator());
|
||||
} else {
|
||||
table.set_format(*format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR);
|
||||
}
|
||||
let item = item_with_meta.item;
|
||||
let item_id = item.id.unwrap();
|
||||
let item_tags: Vec<String> = item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new("ID").with_style(Attr::Bold),
|
||||
Cell::new(&item_id.to_string()),
|
||||
]));
|
||||
let mut table = crate::modes::common::create_table(false);
|
||||
|
||||
let ts_cell = Cell::new(&item.ts.with_timezone(&Local).format("%F %T %Z").to_string());
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new("Timestamp").with_style(Attr::Bold),
|
||||
ts_cell,
|
||||
]));
|
||||
// Add all the rows
|
||||
table.add_row(vec![
|
||||
Cell::new("ID").add_attribute(Attribute::Bold),
|
||||
Cell::new(item_id.to_string()),
|
||||
]);
|
||||
|
||||
let mut item_path_buf = data_path.clone(); // Renamed to avoid conflict if item_path is used later
|
||||
item_path_buf.push(item.id.unwrap().to_string()); // Again, consider safer unwrap
|
||||
let timestamp_str = item.ts.with_timezone(&Local).format("%F %T %Z").to_string();
|
||||
table.add_row(vec![
|
||||
Cell::new("Timestamp").add_attribute(Attribute::Bold),
|
||||
Cell::new(×tamp_str),
|
||||
]);
|
||||
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new("Path").with_style(Attr::Bold),
|
||||
Cell::new(item_path_buf.to_str().expect("Unable to get item path")),
|
||||
]));
|
||||
let mut item_path_buf = data_path.clone();
|
||||
item_path_buf.push(item.id.unwrap().to_string());
|
||||
let path_str = item_path_buf
|
||||
.to_str()
|
||||
.expect("Unable to get item path")
|
||||
.to_string();
|
||||
table.add_row(vec![
|
||||
Cell::new("Path").add_attribute(Attribute::Bold),
|
||||
Cell::new(&path_str),
|
||||
]);
|
||||
|
||||
let size_cell = match item.size {
|
||||
Some(size) => Cell::new(format_size(size as u64, args.options.human_readable).as_str()),
|
||||
None => Cell::new("Missing")
|
||||
.with_style(Attr::ForegroundColor(prettytable::color::RED))
|
||||
.with_style(Attr::Bold),
|
||||
let size_str = match item.size {
|
||||
Some(size) => format_size(size as u64, settings.human_readable),
|
||||
None => "Missing".to_string(),
|
||||
};
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new("Stream Size").with_style(Attr::Bold),
|
||||
size_cell,
|
||||
]));
|
||||
table.add_row(vec![
|
||||
Cell::new("Stream Size").add_attribute(Attribute::Bold),
|
||||
Cell::new(&size_str),
|
||||
]);
|
||||
|
||||
// compression_type is CompressionType due to '?'
|
||||
let compression_type_val = CompressionType::from_str(&item.compression)
|
||||
.map_err(|e| anyhow!("Failed to parse compression type: {}", e))?;
|
||||
table.add_row(vec![
|
||||
Cell::new("Compression").add_attribute(Attribute::Bold),
|
||||
Cell::new(&item.compression),
|
||||
]);
|
||||
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new("Compression").with_style(Attr::Bold),
|
||||
Cell::new(&compression_type_val.to_string()),
|
||||
]));
|
||||
|
||||
let file_size_cell = match item_path_buf.metadata() {
|
||||
Ok(metadata) => {
|
||||
Cell::new(format_size(metadata.len(), args.options.human_readable).as_str())
|
||||
}
|
||||
Err(_) => Cell::new("Missing")
|
||||
.with_style(Attr::ForegroundColor(prettytable::color::RED))
|
||||
.with_style(Attr::Bold),
|
||||
let file_size_str = match item_path_buf.metadata() {
|
||||
Ok(metadata) => format_size(metadata.len(), settings.human_readable),
|
||||
Err(_) => "Missing".to_string(),
|
||||
};
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new("File Size").with_style(Attr::Bold),
|
||||
file_size_cell,
|
||||
]));
|
||||
table.add_row(vec![
|
||||
Cell::new("File Size").add_attribute(Attribute::Bold),
|
||||
Cell::new(&file_size_str),
|
||||
]);
|
||||
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new("Tags").with_style(Attr::Bold),
|
||||
Cell::new(&item_tags.join(" ")),
|
||||
]));
|
||||
let tags_str = item_tags.join(" ");
|
||||
table.add_row(vec![
|
||||
Cell::new("Tags").add_attribute(Attribute::Bold),
|
||||
Cell::new(&tags_str),
|
||||
]);
|
||||
|
||||
for meta in crate::db::get_item_meta(conn, &item)? {
|
||||
// Add meta rows
|
||||
for meta in item_with_meta.meta {
|
||||
let meta_name = format!("Meta: {}", &meta.name);
|
||||
table.add_row(Row::new(vec![
|
||||
Cell::new(meta_name.as_str()).with_style(Attr::Bold),
|
||||
table.add_row(vec![
|
||||
Cell::new(&meta_name).add_attribute(Attribute::Bold),
|
||||
Cell::new(&meta.value),
|
||||
]));
|
||||
]);
|
||||
}
|
||||
|
||||
table.printstd();
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&table.trim_fmt())
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Displays item information in structured JSON or YAML format.
|
||||
///
|
||||
/// Serializes ItemInfo and prints pretty-formatted output. Handles file metadata for sizes.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_with_meta` - Item with metadata and tags.
|
||||
/// * `settings` - Settings for size formatting (human-readable).
|
||||
/// * `data_path` - Data path for compressed file size calculation.
|
||||
/// * `output_format` - JSON or YAML (Table is unreachable here).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Ok(())` on success, or `Err(anyhow::Error)` if serialization or path fails.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * Serde errors during JSON/YAML serialization.
|
||||
/// * Anyhow error if file metadata unavailable.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// show_item_structured(item_with_meta, &settings, data_path, OutputFormat::Json)?;
|
||||
/// ```
|
||||
fn show_item_structured(
|
||||
item: Item,
|
||||
args: &crate::Args,
|
||||
conn: &mut rusqlite::Connection,
|
||||
item_with_meta: ItemWithMeta,
|
||||
settings: &config::Settings,
|
||||
data_path: PathBuf,
|
||||
output_format: OutputFormat,
|
||||
) -> anyhow::Result<()> {
|
||||
) -> Result<()> {
|
||||
let item_tags: Vec<String> = item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let meta_map = item_with_meta.meta_as_map();
|
||||
let item = item_with_meta.item;
|
||||
let item_id = item.id.unwrap();
|
||||
let item_tags: Vec<String> = crate::db::get_item_tags(conn, &item)?
|
||||
.into_iter()
|
||||
.map(|x| x.name)
|
||||
.collect();
|
||||
|
||||
let mut item_path_buf = data_path.clone();
|
||||
item_path_buf.push(item_id.to_string());
|
||||
|
||||
let file_size = item_path_buf.metadata().map(|m| m.len()).ok();
|
||||
let file_size_formatted = match file_size {
|
||||
Some(size) => format_size(size, args.options.human_readable),
|
||||
Some(size) => format_size(size, settings.human_readable),
|
||||
None => "Missing".to_string(),
|
||||
};
|
||||
|
||||
let stream_size_formatted = match item.size {
|
||||
Some(size) => format_size(size as u64, args.options.human_readable),
|
||||
Some(size) => format_size(size as u64, settings.human_readable),
|
||||
None => "Missing".to_string(),
|
||||
};
|
||||
|
||||
let mut meta_map = std::collections::HashMap::new();
|
||||
for meta in crate::db::get_item_meta(conn, &item)? {
|
||||
meta_map.insert(meta.name, meta.value);
|
||||
}
|
||||
|
||||
let item_info = ItemInfo {
|
||||
id: item_id,
|
||||
timestamp: item.ts.with_timezone(&chrono::Local).format("%F %T %Z").to_string(),
|
||||
timestamp: item
|
||||
.ts
|
||||
.with_timezone(&chrono::Local)
|
||||
.format("%F %T %Z")
|
||||
.to_string(),
|
||||
path: item_path_buf.to_str().unwrap_or("").to_string(),
|
||||
stream_size: item.size.map(|s| s as u64),
|
||||
stream_size_formatted,
|
||||
|
||||
@@ -1,38 +1,175 @@
|
||||
use crate::db::{get_items, get_items_matching};
|
||||
/// List mode implementation.
|
||||
///
|
||||
/// This module provides the functionality to list stored items with customizable
|
||||
/// formatting, filtering by tags, and support for different output formats
|
||||
/// including table, JSON, and YAML.
|
||||
use crate::config;
|
||||
use crate::modes::common::ColumnType;
|
||||
use crate::modes::common::{size_column, string_column, get_output_format, OutputFormat};
|
||||
use crate::modes::common::{OutputFormat, format_size};
|
||||
use crate::services::item_service::ItemService;
|
||||
use crate::services::types::ItemWithMeta;
|
||||
use anyhow::Result;
|
||||
use comfy_table::CellAlignment;
|
||||
use comfy_table::{Attribute, Cell, Color, Row};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json;
|
||||
use serde_yaml;
|
||||
use anyhow::anyhow;
|
||||
use log::debug;
|
||||
use prettytable::color;
|
||||
use prettytable::row;
|
||||
use prettytable::format::Alignment;
|
||||
use prettytable::{Attr, Cell, Row, Table};
|
||||
|
||||
/// Structure representing a list item for structured output formats.
|
||||
///
|
||||
/// This struct holds all the information needed to serialize an item for JSON or
|
||||
/// YAML output in list mode.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct ListItem {
|
||||
/// Item ID.
|
||||
///
|
||||
/// The unique identifier for the item.
|
||||
id: Option<i64>,
|
||||
/// Timestamp.
|
||||
///
|
||||
/// The formatted timestamp string for the item.
|
||||
time: String,
|
||||
/// Size in bytes.
|
||||
///
|
||||
/// The raw size of the item content.
|
||||
size: Option<u64>,
|
||||
/// Formatted size.
|
||||
///
|
||||
/// Human-readable size string.
|
||||
size_formatted: String,
|
||||
/// Compression type.
|
||||
///
|
||||
/// The compression algorithm used for the item.
|
||||
compression: String,
|
||||
/// File size in bytes.
|
||||
///
|
||||
/// The size of the stored file on disk.
|
||||
file_size: Option<u64>,
|
||||
/// Formatted file size.
|
||||
///
|
||||
/// Human-readable file size string.
|
||||
file_size_formatted: String,
|
||||
/// File path.
|
||||
///
|
||||
/// The full path to the item's storage file.
|
||||
file_path: String,
|
||||
/// Tags.
|
||||
///
|
||||
/// Vector of tag names associated with the item.
|
||||
tags: Vec<String>,
|
||||
/// Metadata.
|
||||
///
|
||||
/// HashMap of metadata key-value pairs.
|
||||
meta: std::collections::HashMap<String, String>,
|
||||
}
|
||||
|
||||
// Helper function to apply color to a cell.
|
||||
///
|
||||
/// This function converts the configuration color to a comfy-table Color and
|
||||
/// applies it to the cell as foreground or background color.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cell` - The cell to modify.
|
||||
/// * `color` - The color from configuration to apply.
|
||||
/// * `is_foreground` - True for foreground color, false for background.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// The modified cell with color applied.
|
||||
fn apply_color(mut cell: Cell, color: &crate::config::TableColor, is_foreground: bool) -> Cell {
|
||||
use crate::config::TableColor::*;
|
||||
use comfy_table::Color;
|
||||
|
||||
let comfy_color = match color {
|
||||
Black => Color::Black,
|
||||
Red => Color::Red,
|
||||
Green => Color::Green,
|
||||
Yellow => Color::Yellow,
|
||||
Blue => Color::Blue,
|
||||
Magenta => Color::Magenta,
|
||||
Cyan => Color::Cyan,
|
||||
White => Color::White,
|
||||
Gray => Color::Grey,
|
||||
DarkRed => Color::DarkRed,
|
||||
DarkGreen => Color::DarkGreen,
|
||||
DarkYellow => Color::DarkYellow,
|
||||
DarkBlue => Color::DarkBlue,
|
||||
DarkMagenta => Color::DarkMagenta,
|
||||
DarkCyan => Color::DarkCyan,
|
||||
Rgb(r, g, b) => Color::Rgb {
|
||||
r: *r,
|
||||
g: *g,
|
||||
b: *b,
|
||||
},
|
||||
};
|
||||
|
||||
if is_foreground {
|
||||
cell = cell.fg(comfy_color);
|
||||
} else {
|
||||
cell = cell.bg(comfy_color);
|
||||
}
|
||||
|
||||
cell
|
||||
}
|
||||
|
||||
// Helper function to apply attribute to a cell.
|
||||
///
|
||||
/// This function applies a single table attribute to the cell based on the
|
||||
/// configuration attribute type.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cell` - The cell to modify.
|
||||
/// * `attribute` - The attribute from configuration to apply.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// The modified cell with attribute applied.
|
||||
fn apply_attribute(mut cell: Cell, attribute: &crate::config::TableAttribute) -> Cell {
|
||||
use crate::config::TableAttribute::*;
|
||||
use comfy_table::Attribute;
|
||||
|
||||
match attribute {
|
||||
Bold => cell = cell.add_attribute(Attribute::Bold),
|
||||
Dim => cell = cell.add_attribute(Attribute::Dim),
|
||||
Italic => cell = cell.add_attribute(Attribute::Italic),
|
||||
Underlined => cell = cell.add_attribute(Attribute::Underlined),
|
||||
SlowBlink => cell = cell.add_attribute(Attribute::SlowBlink),
|
||||
RapidBlink => cell = cell.add_attribute(Attribute::RapidBlink),
|
||||
Reverse => cell = cell.add_attribute(Attribute::Reverse),
|
||||
Hidden => cell = cell.add_attribute(Attribute::Hidden),
|
||||
CrossedOut => cell = cell.add_attribute(Attribute::CrossedOut),
|
||||
}
|
||||
|
||||
cell
|
||||
}
|
||||
|
||||
/// Main list mode function.
|
||||
///
|
||||
/// This function handles the listing of items based on tags, applying formatting
|
||||
/// and output options from settings. It supports table, JSON, and YAML output formats.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cmd` - Mutable reference to the Clap command for error handling.
|
||||
/// * `settings` - Reference to application settings.
|
||||
/// * `ids` - Mutable vector of item IDs (should be empty for list mode).
|
||||
/// * `tags` - Reference to vector of tags for filtering.
|
||||
/// * `conn` - Mutable reference to database connection.
|
||||
/// * `data_path` - Path to the data directory.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<()>` - Success or error if listing fails.
|
||||
pub fn mode_list(
|
||||
cmd: &mut clap::Command,
|
||||
args: &crate::Args,
|
||||
ids: &mut Vec<i64>,
|
||||
tags: &Vec<String>,
|
||||
settings: &config::Settings,
|
||||
ids: &mut [i64],
|
||||
tags: &[String],
|
||||
conn: &mut rusqlite::Connection,
|
||||
data_path: std::path::PathBuf,
|
||||
) -> anyhow::Result<()> {
|
||||
) -> Result<()> {
|
||||
if !ids.is_empty() {
|
||||
cmd.error(
|
||||
clap::error::ErrorKind::InvalidValue,
|
||||
@@ -41,187 +178,194 @@ pub fn mode_list(
|
||||
.exit();
|
||||
}
|
||||
|
||||
let mut meta: std::collections::HashMap<String, String> = std::collections::HashMap::new();
|
||||
for item in args.item.meta.iter() {
|
||||
let item = item.clone();
|
||||
meta.insert(item.key, item.value);
|
||||
}
|
||||
let item_service = ItemService::new(data_path.clone());
|
||||
let items_with_meta = item_service.list_items(conn, tags, &std::collections::HashMap::new())?;
|
||||
|
||||
let items = match tags.is_empty() && meta.is_empty() {
|
||||
true => get_items(conn)?,
|
||||
false => get_items_matching(conn, tags, &meta)?,
|
||||
};
|
||||
|
||||
debug!("MAIN: Items: {:?}", items);
|
||||
|
||||
// Collect all item IDs for batch queries
|
||||
let item_ids: Vec<i64> = items.iter().map(|item| item.id.unwrap()).collect();
|
||||
|
||||
// Fetch all tags for all items in a single query
|
||||
let all_tags = crate::db::get_tags_for_items(conn, &item_ids)?;
|
||||
let mut tags_by_item: std::collections::HashMap<i64, Vec<String>> =
|
||||
std::collections::HashMap::new();
|
||||
|
||||
// Convert Tag structs to just names
|
||||
for (item_id, tags) in all_tags {
|
||||
let tag_names: Vec<String> = tags.into_iter().map(|tag| tag.name).collect();
|
||||
tags_by_item.insert(item_id, tag_names);
|
||||
}
|
||||
|
||||
// Fetch all metadata for all items in a single query
|
||||
let meta_by_item = crate::db::get_meta_for_items(conn, &item_ids)?;
|
||||
|
||||
let output_format = get_output_format(args);
|
||||
let output_format = crate::modes::common::settings_output_format(settings);
|
||||
|
||||
if output_format != OutputFormat::Table {
|
||||
return show_list_structured(items, tags_by_item, meta_by_item, data_path, args, output_format);
|
||||
return show_list_structured(items_with_meta, data_path, settings, output_format);
|
||||
}
|
||||
|
||||
let mut table = Table::new();
|
||||
table.set_format(*prettytable::format::consts::FORMAT_CLEAN);
|
||||
let mut table = crate::modes::common::create_table_with_config(&settings.table_config);
|
||||
|
||||
let list_format = args.options.list_format.split(",");
|
||||
|
||||
let mut title_row = row!();
|
||||
|
||||
for column in list_format.clone() {
|
||||
let mut column_format = column.split(":");
|
||||
let column_name = column_format.next().expect("Unable to parse column name");
|
||||
let column_type = ColumnType::from_str(column_name)
|
||||
.map_err(|_| anyhow!("Unknown column {:?}", column_name))?;
|
||||
|
||||
if column_type == ColumnType::Meta {
|
||||
let meta_name = column_format
|
||||
.next()
|
||||
.expect("Unable to parse metadata name for meta column");
|
||||
title_row.add_cell(Cell::new(meta_name).with_style(Attr::Bold));
|
||||
} else {
|
||||
title_row.add_cell(Cell::new(&column_type.to_string()).with_style(Attr::Bold));
|
||||
}
|
||||
// Create header row
|
||||
let mut header_cells = Vec::new();
|
||||
for column in &settings.list_format {
|
||||
header_cells.push(Cell::new(&column.label).add_attribute(Attribute::Bold));
|
||||
}
|
||||
table.set_header(header_cells);
|
||||
|
||||
table.set_titles(title_row);
|
||||
for item_with_meta in items_with_meta {
|
||||
let tags: Vec<String> = item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let meta = item_with_meta.meta_as_map();
|
||||
let item = item_with_meta.item;
|
||||
|
||||
for item in items {
|
||||
let item_id = item.id.unwrap();
|
||||
let tags = tags_by_item.get(&item_id).unwrap();
|
||||
let meta = meta_by_item.get(&item_id).unwrap();
|
||||
let mut item_path = data_path.clone();
|
||||
item_path.push(item.id.unwrap().to_string());
|
||||
|
||||
let mut table_row = Row::new(vec![]);
|
||||
let mut table_row = Row::new();
|
||||
|
||||
for column in &settings.list_format {
|
||||
let column_type = column
|
||||
.name
|
||||
.parse::<ColumnType>()
|
||||
.unwrap_or_else(|_| panic!("Unknown column {:?}", column.name));
|
||||
|
||||
for column in list_format.clone() {
|
||||
let mut column_format = column.split(":");
|
||||
let column_name = column_format.next().expect("Unable to parse column name");
|
||||
let column_type = ColumnType::from_str(column_name)
|
||||
.unwrap_or_else(|_| panic!("Unknown column {:?}", column_name));
|
||||
let mut meta_name: Option<&str> = None;
|
||||
|
||||
if column_type == ColumnType::Meta {
|
||||
meta_name = column_format.next();
|
||||
if let ColumnType::Meta = column_type {
|
||||
let parts: Vec<&str> = column.name.split(':').collect();
|
||||
if parts.len() > 1 {
|
||||
meta_name = Some(parts[1]);
|
||||
}
|
||||
}
|
||||
|
||||
let column_width: usize = match column_format.next() {
|
||||
Some(len) => len.parse().unwrap_or(0),
|
||||
None => 0,
|
||||
};
|
||||
|
||||
let cell = match column_type {
|
||||
ColumnType::Id => Cell::new_align(
|
||||
&string_column(item.id.unwrap_or(0).to_string(), column_width),
|
||||
Alignment::RIGHT,
|
||||
),
|
||||
ColumnType::Time => Cell::new(&string_column(
|
||||
item.ts
|
||||
let cell_content = match column_type {
|
||||
ColumnType::Id => item.id.unwrap_or(0).to_string(),
|
||||
ColumnType::Time => item
|
||||
.ts
|
||||
.with_timezone(&chrono::Local)
|
||||
.format("%F %T")
|
||||
.to_string(),
|
||||
column_width,
|
||||
)),
|
||||
ColumnType::Size => match item.size {
|
||||
Some(size) => Cell::new_align(
|
||||
&size_column(size as u64, args.options.human_readable, column_width),
|
||||
Alignment::RIGHT,
|
||||
),
|
||||
Some(size) => format_size(size as u64, settings.human_readable),
|
||||
None => match item_path.metadata() {
|
||||
Ok(_) => Cell::new_align("Unknown", Alignment::RIGHT)
|
||||
.with_style(Attr::ForegroundColor(color::YELLOW))
|
||||
.with_style(Attr::Bold),
|
||||
Err(_) => Cell::new_align("Missing", Alignment::RIGHT)
|
||||
.with_style(Attr::ForegroundColor(color::RED))
|
||||
.with_style(Attr::Bold),
|
||||
Ok(_) => "Unknown".to_string(),
|
||||
Err(_) => "Missing".to_string(),
|
||||
},
|
||||
},
|
||||
ColumnType::Compression => {
|
||||
Cell::new(&string_column(item.compression.to_string(), column_width))
|
||||
},
|
||||
ColumnType::Compression => item.compression.to_string(),
|
||||
ColumnType::FileSize => match item_path.metadata() {
|
||||
Ok(metadata) => Cell::new_align(
|
||||
&size_column(metadata.len(), args.options.human_readable, column_width),
|
||||
Alignment::RIGHT,
|
||||
),
|
||||
Err(_) => Cell::new_align("Missing", Alignment::RIGHT)
|
||||
.with_style(Attr::ForegroundColor(color::RED))
|
||||
.with_style(Attr::Bold),
|
||||
Ok(metadata) => format_size(metadata.len(), settings.human_readable),
|
||||
Err(_) => "Missing".to_string(),
|
||||
},
|
||||
ColumnType::FilePath => Cell::new(&string_column(
|
||||
item_path.clone().into_os_string().into_string().unwrap(),
|
||||
column_width,
|
||||
)),
|
||||
ColumnType::Tags => Cell::new(&string_column(tags.join(" "), column_width)),
|
||||
ColumnType::FilePath => item_path.clone().into_os_string().into_string().unwrap(),
|
||||
ColumnType::Tags => tags.join(" "),
|
||||
ColumnType::Meta => match meta_name {
|
||||
Some(meta_name) => match meta.get(meta_name) {
|
||||
Some(meta_value) => {
|
||||
Cell::new(&string_column(meta_value.to_string(), column_width))
|
||||
Some(meta_value) => meta_value.to_string(),
|
||||
None => "".to_string(),
|
||||
},
|
||||
None => "".to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
// Truncate content to max 3 lines
|
||||
let mut cell_lines: Vec<String> =
|
||||
cell_content.split('\n').map(|s| s.to_string()).collect();
|
||||
if cell_lines.len() > 3 {
|
||||
cell_lines.truncate(3);
|
||||
// Add ellipsis to the last line if we truncated
|
||||
if let Some(last_line) = cell_lines.last_mut() {
|
||||
if last_line.len() > 3 {
|
||||
last_line.truncate(last_line.len() - 3);
|
||||
}
|
||||
None => Cell::new(""),
|
||||
},
|
||||
None => Cell::new(""),
|
||||
},
|
||||
last_line.push_str("...");
|
||||
}
|
||||
}
|
||||
let truncated_content = cell_lines.join("\n");
|
||||
|
||||
let mut cell = Cell::new(truncated_content);
|
||||
|
||||
// Apply column-specific styling
|
||||
if let Some(fg_color) = &column.fg_color {
|
||||
cell = apply_color(cell, fg_color, true);
|
||||
}
|
||||
|
||||
if let Some(bg_color) = &column.bg_color {
|
||||
cell = apply_color(cell, bg_color, false);
|
||||
}
|
||||
|
||||
for attribute in &column.attributes {
|
||||
cell = apply_attribute(cell, attribute);
|
||||
}
|
||||
|
||||
// Apply padding if specified
|
||||
if let Some((_left_padding, _right_padding)) = column.padding {
|
||||
// Note: comfy-table doesn't directly support padding, so we'd need to handle this
|
||||
// by adding spaces to the content, or use a different approach
|
||||
}
|
||||
|
||||
// Apply styling for specific cases
|
||||
match column_type {
|
||||
ColumnType::Size => {
|
||||
if item.size.is_none() {
|
||||
if item_path.metadata().is_ok() {
|
||||
cell = cell
|
||||
.fg(comfy_table::Color::Yellow)
|
||||
.add_attribute(Attribute::Bold);
|
||||
} else {
|
||||
cell = cell
|
||||
.fg(comfy_table::Color::Red)
|
||||
.add_attribute(Attribute::Bold);
|
||||
}
|
||||
}
|
||||
}
|
||||
ColumnType::FileSize => {
|
||||
if item_path.metadata().is_err() {
|
||||
cell = cell
|
||||
.fg(comfy_table::Color::Red)
|
||||
.add_attribute(Attribute::Bold);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Apply alignment
|
||||
cell = match column.align {
|
||||
crate::config::ColumnAlignment::Right => cell.set_alignment(CellAlignment::Right),
|
||||
crate::config::ColumnAlignment::Left => cell.set_alignment(CellAlignment::Left),
|
||||
crate::config::ColumnAlignment::Center => cell.set_alignment(CellAlignment::Center),
|
||||
};
|
||||
table_row.add_cell(cell);
|
||||
}
|
||||
table.add_row(table_row);
|
||||
}
|
||||
|
||||
table.printstd();
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&table.trim_fmt())
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn show_list_structured(
|
||||
items: Vec<crate::db::Item>,
|
||||
tags_by_item: std::collections::HashMap<i64, Vec<String>>,
|
||||
meta_by_item: std::collections::HashMap<i64, std::collections::HashMap<String, String>>,
|
||||
items_with_meta: Vec<ItemWithMeta>,
|
||||
data_path: std::path::PathBuf,
|
||||
args: &crate::Args,
|
||||
settings: &config::Settings,
|
||||
output_format: OutputFormat,
|
||||
) -> anyhow::Result<()> {
|
||||
) -> Result<()> {
|
||||
let mut list_items = Vec::new();
|
||||
|
||||
for item in items {
|
||||
for item_with_meta in items_with_meta {
|
||||
let tags: Vec<String> = item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let meta = item_with_meta.meta_as_map();
|
||||
let item = item_with_meta.item;
|
||||
let item_id = item.id.unwrap();
|
||||
let tags = tags_by_item.get(&item_id).cloned().unwrap_or_default();
|
||||
let meta = meta_by_item.get(&item_id).cloned().unwrap_or_default();
|
||||
|
||||
let mut item_path = data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
|
||||
let file_size = item_path.metadata().map(|m| m.len()).ok();
|
||||
let file_size_formatted = match file_size {
|
||||
Some(size) => crate::modes::common::format_size(size, args.options.human_readable),
|
||||
Some(size) => crate::modes::common::format_size(size, settings.human_readable),
|
||||
None => "Missing".to_string(),
|
||||
};
|
||||
|
||||
let size_formatted = match item.size {
|
||||
Some(size) => crate::modes::common::format_size(size as u64, args.options.human_readable),
|
||||
Some(size) => crate::modes::common::format_size(size as u64, settings.human_readable),
|
||||
None => "Unknown".to_string(),
|
||||
};
|
||||
|
||||
let list_item = ListItem {
|
||||
id: item.id,
|
||||
time: item.ts.with_timezone(&chrono::Local).format("%F %T").to_string(),
|
||||
time: item
|
||||
.ts
|
||||
.with_timezone(&chrono::Local)
|
||||
.format("%F %T")
|
||||
.to_string(),
|
||||
size: item.size.map(|s| s as u64),
|
||||
size_formatted,
|
||||
compression: item.compression,
|
||||
|
||||
@@ -1,10 +1,49 @@
|
||||
#[cfg(feature = "server")]
|
||||
pub mod server;
|
||||
|
||||
/// Common utilities for all modes, including column types and output formatting.
|
||||
pub mod common;
|
||||
|
||||
pub mod delete;
|
||||
pub mod diff;
|
||||
pub mod generate_config;
|
||||
pub mod get;
|
||||
pub mod info;
|
||||
pub mod list;
|
||||
pub mod save;
|
||||
pub mod server;
|
||||
pub mod status;
|
||||
pub mod update;
|
||||
pub mod status_plugins;
|
||||
|
||||
/// Column types, output formats, and formatting utilities shared across modes.
|
||||
pub use common::{ColumnType, OutputFormat, format_size, settings_output_format};
|
||||
|
||||
/// Deletes items from the database by ID.
|
||||
pub use delete::mode_delete;
|
||||
|
||||
/// Compares two items and shows differences.
|
||||
pub use diff::mode_diff;
|
||||
|
||||
/// Generates a default configuration file.
|
||||
pub use generate_config::mode_generate_config;
|
||||
|
||||
/// Retrieves and outputs item content.
|
||||
pub use get::mode_get;
|
||||
|
||||
/// Displays detailed information about items.
|
||||
pub use info::mode_info;
|
||||
|
||||
/// Lists items with optional filtering.
|
||||
pub use list::mode_list;
|
||||
|
||||
/// Saves new item content with optional tags and metadata.
|
||||
pub use save::mode_save;
|
||||
|
||||
#[cfg(feature = "server")]
|
||||
/// Starts the HTTP server for REST API access.
|
||||
pub use server::mode_server;
|
||||
|
||||
/// Shows status of directories and compression support.
|
||||
pub use status::mode_status;
|
||||
|
||||
/// Lists available plugins and their configurations.
|
||||
pub use status_plugins::mode_status_plugins;
|
||||
|
||||
@@ -1,12 +1,24 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::Result;
|
||||
use clap::Command;
|
||||
use log::debug;
|
||||
use std::io::{Read, Write, IsTerminal};
|
||||
use std::io::{Read, Write};
|
||||
|
||||
// Import the missing functions from common module
|
||||
use crate::modes::common::{cmd_args_digest_type, cmd_args_compression_type, cmd_args_meta_plugin_types};
|
||||
use crate::config;
|
||||
use crate::services::item_service::ItemService;
|
||||
|
||||
fn validate_save_args(cmd: &mut Command, ids: &Vec<i64>) {
|
||||
/// Validates save mode arguments and exits with error if invalid.
|
||||
///
|
||||
/// This function checks that no item IDs are provided for save mode,
|
||||
/// as save operations create new items rather than modifying existing ones.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cmd` - Mutable reference to the Clap command for error reporting.
|
||||
/// * `ids` - Reference to the vector of item IDs (should be empty for save mode).
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Exits the program via Clap error if IDs are provided.
|
||||
fn validate_save_args(cmd: &mut Command, ids: &[i64]) {
|
||||
if !ids.is_empty() {
|
||||
cmd.error(
|
||||
clap::error::ErrorKind::InvalidValue,
|
||||
@@ -16,260 +28,107 @@ fn validate_save_args(cmd: &mut Command, ids: &Vec<i64>) {
|
||||
}
|
||||
}
|
||||
|
||||
fn initialize_tags(tags: &mut Vec<String>) {
|
||||
if tags.is_empty() {
|
||||
tags.push("none".to_string());
|
||||
/// A tee reader that duplicates input to both a reader and a writer as it reads.
|
||||
///
|
||||
/// This struct implements the `Read` trait and forwards all read operations to
|
||||
/// an underlying reader while simultaneously writing the same data to a writer.
|
||||
/// It's useful for saving content to a file while also echoing it to stdout.
|
||||
///
|
||||
/// # Fields
|
||||
///
|
||||
/// * `reader` - The underlying reader providing the data source.
|
||||
/// * `writer` - The writer receiving copies of all read data.
|
||||
struct TeeReader<R: Read, W: Write> {
|
||||
reader: R,
|
||||
writer: W,
|
||||
}
|
||||
|
||||
impl<R: Read, W: Write> Read for TeeReader<R, W> {
|
||||
/// Reads data from the underlying reader and duplicates it to the writer.
|
||||
///
|
||||
/// This implementation reads from the inner reader and then writes the same
|
||||
/// bytes to the writer. If the read returns 0 bytes (EOF), it returns 0.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `buf` - Buffer to fill with data from the reader.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `io::Result<usize>` - Number of bytes read, or an I/O error.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if the underlying read or write operations fail.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut tee = TeeReader {
|
||||
/// reader: std::io::Cursor::new(b"Hello, world!"),
|
||||
/// writer: std::io::sink(),
|
||||
/// };
|
||||
/// let mut buf = [0; 5];
|
||||
/// let n = tee.read(&mut buf).unwrap();
|
||||
/// assert_eq!(n, 5);
|
||||
/// assert_eq!(&buf[..n], b"Hello");
|
||||
/// ```
|
||||
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
|
||||
let n = self.reader.read(buf)?;
|
||||
if n > 0 {
|
||||
self.writer.write_all(&buf[..n])?;
|
||||
}
|
||||
Ok(n)
|
||||
}
|
||||
}
|
||||
|
||||
fn setup_compression_and_plugins(
|
||||
cmd: &mut Command,
|
||||
args: &crate::Args,
|
||||
) -> (crate::compression_engine::CompressionType, Box<dyn crate::compression_engine::CompressionEngine>, Vec<Box<dyn crate::meta_plugin::MetaPlugin>>) {
|
||||
let digest_type = cmd_args_digest_type(cmd, &args);
|
||||
debug!("MAIN: Digest type: {:?}", digest_type);
|
||||
|
||||
let compression_type = cmd_args_compression_type(cmd, &args);
|
||||
debug!("MAIN: Compression type: {:?}", compression_type);
|
||||
let compression_engine =
|
||||
crate::compression_engine::get_compression_engine(compression_type.clone()).expect("Unable to get compression engine");
|
||||
|
||||
// Start with meta plugin types from command line
|
||||
let mut meta_plugin_types: Vec<crate::meta_plugin::MetaPluginType> = cmd_args_meta_plugin_types(cmd, &args);
|
||||
debug!("MAIN: Meta plugin types: {:?}", meta_plugin_types);
|
||||
|
||||
// Convert digest type to meta plugin type and add to the list if needed
|
||||
let digest_meta_plugin_type = match digest_type {
|
||||
crate::meta_plugin::MetaPluginType::DigestSha256 => Some(crate::meta_plugin::MetaPluginType::DigestSha256),
|
||||
crate::meta_plugin::MetaPluginType::DigestMd5 => Some(crate::meta_plugin::MetaPluginType::DigestMd5),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
// Add digest meta plugin to the list if needed
|
||||
if let Some(digest_plugin_type) = digest_meta_plugin_type {
|
||||
if !meta_plugin_types.contains(&digest_plugin_type) {
|
||||
meta_plugin_types.push(digest_plugin_type);
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize meta_plugins with MetaPlugin instances for each MetaPluginType
|
||||
let mut meta_plugins: Vec<Box<dyn crate::meta_plugin::MetaPlugin>> = meta_plugin_types
|
||||
.iter()
|
||||
.map(|meta_plugin_type| crate::meta_plugin::get_meta_plugin(meta_plugin_type.clone()))
|
||||
.collect();
|
||||
|
||||
// Check for unsupported meta plugins, warn the user, and remove them from the list
|
||||
let mut i = 0;
|
||||
meta_plugins.retain(|meta_plugin| {
|
||||
let is_supported = meta_plugin.is_supported();
|
||||
if !is_supported {
|
||||
// We need to get the meta name for the warning message
|
||||
// Since we can't mutably borrow meta_plugin here, we create a temporary one
|
||||
let meta_plugin_type = meta_plugin_types[i].clone();
|
||||
let mut temp_plugin = crate::meta_plugin::get_meta_plugin(meta_plugin_type);
|
||||
eprintln!("Warning: Meta plugin '{}' is enabled but not supported on this system", temp_plugin.meta_name());
|
||||
}
|
||||
i += 1;
|
||||
is_supported
|
||||
});
|
||||
|
||||
(compression_type, compression_engine, meta_plugins)
|
||||
}
|
||||
|
||||
fn create_and_log_item(
|
||||
conn: &mut rusqlite::Connection,
|
||||
args: &crate::Args,
|
||||
tags: &Vec<String>,
|
||||
compression_type: &crate::compression_engine::CompressionType,
|
||||
) -> Result<crate::db::Item, anyhow::Error> {
|
||||
let mut item = crate::db::Item {
|
||||
id: None,
|
||||
ts: chrono::Utc::now(),
|
||||
size: None,
|
||||
compression: compression_type.to_string(),
|
||||
};
|
||||
|
||||
let id = crate::db::insert_item(conn, item.clone())?;
|
||||
item.id = Some(id);
|
||||
debug!("MAIN: Added item {:?}", item.clone());
|
||||
|
||||
if !args.options.quiet {
|
||||
if std::io::stderr().is_terminal() {
|
||||
let mut t = term::stderr().unwrap();
|
||||
t.reset().unwrap_or(());
|
||||
t.attr(term::Attr::Bold).unwrap_or(());
|
||||
write!(t, "KEEP:").unwrap_or(());
|
||||
t.reset().unwrap_or(());
|
||||
write!(t, " New item ").unwrap_or(());
|
||||
t.attr(term::Attr::Bold).unwrap_or(());
|
||||
write!(t, "{id}")?;
|
||||
t.reset().unwrap_or(());
|
||||
write!(t, " tags: ")?;
|
||||
t.attr(term::Attr::Bold).unwrap_or(());
|
||||
write!(t, "{}", tags.join(" "))?;
|
||||
t.reset().unwrap_or(());
|
||||
writeln!(t)?;
|
||||
std::io::stderr().flush()?;
|
||||
} else {
|
||||
let mut t = std::io::stderr();
|
||||
writeln!(t, "KEEP: New item: {} tags: {:?}", id, tags)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(item)
|
||||
}
|
||||
|
||||
/// Persists the item's tag list in the database.
///
/// Thin wrapper over `crate::db::set_item_tags`; `_args` is accepted for
/// signature symmetry with the other setup helpers but is currently unused.
fn setup_item_metadata(
    conn: &mut rusqlite::Connection,
    _args: &crate::Args,
    item: &crate::db::Item,
    tags: &Vec<String>,
) -> Result<(), anyhow::Error> {
    crate::db::set_item_tags(conn, item.clone(), tags)?;
    Ok(())
}
|
||||
|
||||
fn collect_item_meta(args: &crate::Args) -> std::collections::HashMap<String, String> {
|
||||
let mut item_meta: std::collections::HashMap<String, String> = crate::modes::common::get_meta_from_env();
|
||||
|
||||
if let Ok(hostname) = gethostname::gethostname().into_string() {
|
||||
if !item_meta.contains_key("hostname") {
|
||||
item_meta.insert("hostname".to_string(), hostname);
|
||||
}
|
||||
}
|
||||
|
||||
for item in args.item.meta.iter() {
|
||||
let item = item.clone();
|
||||
item_meta.insert(item.key, item.value);
|
||||
}
|
||||
|
||||
item_meta
|
||||
}
|
||||
|
||||
fn process_input_stream(
|
||||
compression_engine: &Box<dyn crate::compression_engine::CompressionEngine>,
|
||||
data_path: &std::path::PathBuf,
|
||||
item_id: i64,
|
||||
meta_plugins: &mut Vec<Box<dyn crate::meta_plugin::MetaPlugin>>,
|
||||
) -> Result<(Box<dyn std::io::Write>, crate::db::Item), anyhow::Error> {
|
||||
let mut item = crate::db::Item {
|
||||
id: Some(item_id),
|
||||
ts: chrono::Utc::now(),
|
||||
size: None,
|
||||
compression: String::new(), // Will be set later
|
||||
};
|
||||
|
||||
let mut item_path = data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
|
||||
let mut stdin = std::io::stdin().lock();
|
||||
let mut stdout = std::io::stdout().lock();
|
||||
let mut buffer = [0; libc::BUFSIZ as usize];
|
||||
|
||||
let mut item_out: Box<dyn std::io::Write> =
|
||||
compression_engine
|
||||
.create(item_path.clone())
|
||||
.map_err(|e| anyhow!("Unable to write file {:?}: {}", item_path, e))?;
|
||||
|
||||
debug!("MAIN: Starting IO loop");
|
||||
loop {
|
||||
let n = stdin.read(&mut buffer[..libc::BUFSIZ as usize])?;
|
||||
item.size = match item.size {
|
||||
None => Some(n as i64),
|
||||
Some(prev_n) => Some(prev_n + n as i64),
|
||||
};
|
||||
|
||||
if n == 0 {
|
||||
debug!("MAIN: EOF on STDIN");
|
||||
break;
|
||||
}
|
||||
|
||||
debug!("MAIN: Loop - {:?} bytes", item.size);
|
||||
|
||||
stdout.write_all(&buffer[..n])?;
|
||||
item_out.write_all(&buffer[..n])?;
|
||||
|
||||
for meta_plugin in meta_plugins.iter_mut() {
|
||||
meta_plugin.update(&buffer[..n]);
|
||||
}
|
||||
}
|
||||
debug!("MAIN: Ending IO loop after {:?} bytes", item.size);
|
||||
|
||||
stdout.flush()?;
|
||||
item_out.flush()?;
|
||||
|
||||
Ok((item_out, item))
|
||||
}
|
||||
|
||||
fn finalize_meta_plugins(
|
||||
conn: &rusqlite::Connection,
|
||||
meta_plugins: &mut Vec<Box<dyn crate::meta_plugin::MetaPlugin>>,
|
||||
item: &crate::db::Item,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
for meta_plugin in meta_plugins.iter_mut() {
|
||||
let meta_name = meta_plugin.meta_name();
|
||||
|
||||
match meta_plugin.finalize() {
|
||||
Ok(meta_value) => {
|
||||
let meta = crate::db::Meta {
|
||||
id: item.id.ok_or_else(|| anyhow!("Item missing ID"))?,
|
||||
name: meta_name.clone(),
|
||||
value: meta_value,
|
||||
};
|
||||
if let Err(e) = crate::db::store_meta(conn, meta) {
|
||||
eprintln!("Warning: Failed to store meta value for {}: {}", meta_name, e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Warning: Failed to finalize meta plugin {}: {}", meta_name, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Main save mode function.
|
||||
///
|
||||
/// This function handles the save operation by reading from stdin, duplicating
|
||||
/// the input to stdout (for real-time display), and saving the content to the
|
||||
/// item service. It validates arguments, creates the tee reader, and processes
|
||||
/// the save operation.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cmd` - Mutable reference to the Clap command for error handling.
|
||||
/// * `settings` - Application settings containing configuration.
|
||||
/// * `ids` - Mutable vector of item IDs (should be empty for save mode).
|
||||
/// * `tags` - Mutable vector of tags to associate with the new item.
|
||||
/// * `conn` - Mutable reference to the database connection.
|
||||
/// * `data_path` - Path to the data storage directory.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(), anyhow::Error>` - Success or error if save fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// // In CLI context, this would be called internally
|
||||
/// mode_save(&mut cmd, &settings, &mut vec![], &mut vec!["important".to_string()], &mut conn, data_path)?;
|
||||
/// ```
|
||||
pub fn mode_save(
|
||||
cmd: &mut Command,
|
||||
args: &crate::Args,
|
||||
ids: &mut Vec<i64>,
|
||||
settings: &config::Settings,
|
||||
ids: &mut [i64],
|
||||
tags: &mut Vec<String>,
|
||||
conn: &mut rusqlite::Connection,
|
||||
data_path: std::path::PathBuf,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
validate_save_args(cmd, ids);
|
||||
initialize_tags(tags);
|
||||
|
||||
let (compression_type, compression_engine, mut meta_plugins) = setup_compression_and_plugins(cmd, args);
|
||||
let item_service = ItemService::new(data_path);
|
||||
|
||||
let mut item = create_and_log_item(conn, args, tags, &compression_type)?;
|
||||
setup_item_metadata(conn, args, &item, tags)?; // Pass mutable reference
|
||||
let stdin = std::io::stdin();
|
||||
let stdout = std::io::stdout();
|
||||
|
||||
// Save as much as possible in case something breaks - don't use transactions
|
||||
// This allows partial saves to succeed even if some metadata operations fail
|
||||
let item_meta = collect_item_meta(args);
|
||||
let item_id = item.id.ok_or_else(|| anyhow!("Item missing ID"))?;
|
||||
|
||||
for kv in item_meta.iter() {
|
||||
let meta = crate::db::Meta {
|
||||
id: item_id,
|
||||
name: kv.0.to_string(),
|
||||
value: kv.1.to_string(),
|
||||
let tee_reader = TeeReader {
|
||||
reader: stdin.lock(),
|
||||
writer: stdout.lock(),
|
||||
};
|
||||
crate::db::store_meta(conn, meta)?;
|
||||
}
|
||||
|
||||
let (_item_out, processed_item) = process_input_stream(
|
||||
&compression_engine,
|
||||
&data_path,
|
||||
item_id,
|
||||
&mut meta_plugins,
|
||||
)?;
|
||||
|
||||
item.size = processed_item.size;
|
||||
item.compression = compression_type.to_string();
|
||||
|
||||
finalize_meta_plugins(conn, &mut meta_plugins, &item)?;
|
||||
crate::db::update_item(conn, item.clone())?;
|
||||
item_service.save_item(tee_reader, cmd, settings, tags, conn)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,101 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use axum::{
|
||||
routing::get,
|
||||
Router,
|
||||
};
|
||||
use clap::Command;
|
||||
use log::{debug, info, warn};
|
||||
use std::net::SocketAddr;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use tower_http::cors::CorsLayer;
|
||||
use tower::ServiceBuilder;
|
||||
use tower_http::trace::TraceLayer;
|
||||
|
||||
use crate::Args;
|
||||
|
||||
mod common;
|
||||
mod status;
|
||||
mod items;
|
||||
mod content;
|
||||
mod docs;
|
||||
|
||||
pub use common::{ServerConfig, AppState, logging_middleware};
|
||||
use status::handle_status;
|
||||
use items::{handle_list_items, handle_get_item, handle_put_item, handle_delete_item};
|
||||
use content::{handle_get_content_latest, handle_get_content};
|
||||
use docs::{handle_openapi, handle_swagger_ui};
|
||||
|
||||
/// Entry point for server mode: builds a Tokio runtime and serves the REST
/// API until the server future completes.
///
/// # Arguments
///
/// * `_cmd` - Clap command (unused here).
/// * `args` - Parsed CLI arguments; `args.mode.server` holds the bind address.
/// * `conn` - Database connection; ownership is moved into the async runtime.
/// * `data_path` - Path to the on-disk item store.
pub fn mode_server(
    _cmd: &mut Command,
    args: &Args,
    conn: &mut rusqlite::Connection,
    data_path: PathBuf,
) -> Result<()> {
    // NOTE(review): unwrap assumes this mode is only entered when `--server`
    // was supplied — confirm at the call site.
    let server_address = args.mode.server.as_ref().unwrap();

    let config = ServerConfig {
        address: server_address.clone(),
        password: args.options.server_password.clone(),
    };

    // We need to move the connection into the async runtime
    let rt = tokio::runtime::Runtime::new()?;
    // Take ownership of the connection and move it into the async runtime.
    // The caller is left holding a throwaway in-memory connection.
    let owned_conn = std::mem::replace(conn, rusqlite::Connection::open_in_memory()?);
    rt.block_on(run_server(config, owned_conn, data_path, args))
}
|
||||
|
||||
/// Builds the axum application (routes, request logging, tracing, permissive
/// CORS) and serves it over TCP until shutdown.
///
/// Unix-socket style addresses (starting with `/` or `./`) are not supported
/// and fall back to TCP on 127.0.0.1:8080.
async fn run_server(
    config: ServerConfig,
    conn: rusqlite::Connection,
    data_dir: PathBuf,
    args: &Args,
) -> Result<()> {
    debug!("Starting REST HTTP server on {}", config.address);

    // Use the existing database connection
    let db_conn = Arc::new(Mutex::new(conn));

    // Shared state handed to every handler.
    let state = AppState {
        db: db_conn,
        data_dir: data_dir.clone(),
        password: config.password.clone(),
        args: Arc::new(args.clone()),
    };

    let app = Router::new()
        .route("/status", get(handle_status))
        .route("/item/", get(handle_list_items).put(handle_put_item))
        .route("/item/:id", get(handle_get_item).delete(handle_delete_item))
        .route("/content", get(handle_get_content_latest))
        .route("/content/:id", get(handle_get_content))
        .route("/openapi.json", get(handle_openapi))
        .route("/swagger/", get(handle_swagger_ui))
        .layer(axum::middleware::from_fn(logging_middleware))
        .layer(
            ServiceBuilder::new()
                .layer(TraceLayer::new_for_http())
                .layer(CorsLayer::permissive())
        )
        .with_state(state);

    let addr: SocketAddr = if config.address.starts_with('/') || config.address.starts_with("./") {
        // Unix socket - not supported by axum directly, fall back to TCP
        warn!("Unix sockets not yet implemented, falling back to TCP on 127.0.0.1:8080");
        "127.0.0.1:8080".parse()?
    } else {
        config.address.parse()?
    };

    info!("SERVER: HTTP server listening on {}", addr);

    let listener = tokio::net::TcpListener::bind(addr).await?;
    axum::serve(
        listener,
        // Expose the peer address to handlers/middleware via ConnectInfo.
        app.into_make_service_with_connect_info::<SocketAddr>()
    ).await?;

    Ok(())
}
|
||||
37
src/modes/server/api/common.rs
Normal file
37
src/modes/server/api/common.rs
Normal file
@@ -0,0 +1,37 @@
|
||||
use axum::{
|
||||
http::{header, StatusCode},
|
||||
response::Response,
|
||||
};
|
||||
use serde::Serialize;
|
||||
use log;
|
||||
|
||||
pub struct ResponseBuilder;
|
||||
|
||||
impl ResponseBuilder {
|
||||
pub fn json<T: Serialize>(data: T) -> Result<Response, StatusCode> {
|
||||
let json = serde_json::to_vec(&data).map_err(|e| {
|
||||
log::warn!("Failed to serialize response: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, "application/json")
|
||||
.header(header::CONTENT_LENGTH, json.len().to_string())
|
||||
.body(axum::body::Body::from(json))
|
||||
.map_err(|e| {
|
||||
log::warn!("Failed to build response: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})
|
||||
}
|
||||
|
||||
pub fn binary(content: &[u8], mime_type: &str) -> Result<Response, StatusCode> {
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, mime_type)
|
||||
.header(header::CONTENT_LENGTH, content.len().to_string())
|
||||
.body(axum::body::Body::from(content.to_vec()))
|
||||
.map_err(|e| {
|
||||
log::warn!("Failed to build response: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})
|
||||
}
|
||||
}
|
||||
703
src/modes/server/api/item.rs
Normal file
703
src/modes/server/api/item.rs
Normal file
@@ -0,0 +1,703 @@
|
||||
use crate::modes::server::common::{
|
||||
ApiResponse, AppState, ItemContentQuery, ItemInfo, ItemInfoListResponse, ItemInfoResponse,
|
||||
ItemQuery, ListItemsQuery, MetadataResponse, TagsQuery,
|
||||
};
|
||||
use crate::services::async_item_service::AsyncItemService;
|
||||
use crate::services::error::CoreError;
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
http::{StatusCode, header},
|
||||
response::{Json, Response},
|
||||
};
|
||||
use log::{debug, warn};
|
||||
use std::collections::HashMap;
|
||||
|
||||
// Helper functions to replace the missing binary_detection module
|
||||
async fn check_binary_content_allowed(
|
||||
item_service: &AsyncItemService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
allow_binary: bool,
|
||||
) -> Result<(), StatusCode> {
|
||||
if !allow_binary {
|
||||
let is_binary = is_content_binary(item_service, item_id, metadata).await?;
|
||||
if is_binary {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Helper function to determine if content is binary
|
||||
async fn is_content_binary(
|
||||
item_service: &AsyncItemService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
) -> Result<bool, StatusCode> {
|
||||
if let Some(text_val) = metadata.get("text") {
|
||||
Ok(text_val == "false")
|
||||
} else {
|
||||
// If text metadata isn't set, we need to check the content using streaming approach
|
||||
match item_service
|
||||
.get_item_content_info_streaming(item_id, None)
|
||||
.await
|
||||
{
|
||||
Ok((_, _, is_binary)) => Ok(is_binary),
|
||||
Err(e) => {
|
||||
log::warn!(
|
||||
"Failed to get content info for binary check for item {}: {}",
|
||||
item_id,
|
||||
e
|
||||
);
|
||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to replace missing build_filter_string
/// Builds an optional content-filter string from the query parameters.
///
/// Currently a stub that ignores `_params` and always returns `None`
/// (no filtering) until filtering is implemented.
fn build_filter_string(_params: &ItemQuery) -> Option<String> {
    // Implement this based on your needs
    None
}
|
||||
|
||||
// Create a simple ResponseBuilder to replace the missing one
// NOTE(review): this duplicates `ResponseBuilder` in server/api/common.rs —
// consider reusing the shared implementation instead of this local copy.
struct ResponseBuilder;

impl ResponseBuilder {
    /// Serializes `data` as JSON and returns it as an `application/json`
    /// response with Content-Length set; 500 if serialization or response
    /// construction fails.
    pub fn json<T: serde::Serialize>(data: T) -> Result<Response, StatusCode> {
        let json = serde_json::to_vec(&data).map_err(|e| {
            log::warn!("Failed to serialize response: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;

        Response::builder()
            .header(header::CONTENT_TYPE, "application/json")
            .header(header::CONTENT_LENGTH, json.len().to_string())
            .body(axum::body::Body::from(json))
            .map_err(|e| {
                log::warn!("Failed to build response: {}", e);
                StatusCode::INTERNAL_SERVER_ERROR
            })
    }

    /// Wraps raw bytes in a response with the given MIME type and
    /// Content-Length; 500 if the response cannot be built.
    pub fn binary(content: &[u8], mime_type: &str) -> Result<Response, StatusCode> {
        Response::builder()
            .header(header::CONTENT_TYPE, mime_type)
            .header(header::CONTENT_LENGTH, content.len().to_string())
            .body(axum::body::Body::from(content.to_vec()))
            .map_err(|e| {
                log::warn!("Failed to build response: {}", e);
                StatusCode::INTERNAL_SERVER_ERROR
            })
    }
}
|
||||
|
||||
/// Helper function to get mime type from metadata.
///
/// Returns the `mime_type` metadata entry, or `application/octet-stream`
/// when none is set.
fn get_mime_type(metadata: &HashMap<String, String>) -> String {
    metadata
        .get("mime_type")
        // `.cloned()` instead of `.map(|s| s.to_string())`: same result,
        // without going through the Display machinery.
        .cloned()
        .unwrap_or_else(|| "application/octet-stream".to_string())
}
|
||||
|
||||
/// Helper function to apply offset and length to content.
///
/// Returns the sub-slice of `content` starting at `offset` (clamped to the
/// content length) spanning `length` bytes; a `length` of 0 means "through
/// the end". Out-of-range requests yield an empty slice instead of
/// panicking.
fn apply_offset_length(content: &[u8], offset: u64, length: u64) -> &[u8] {
    let content_len = content.len() as u64;
    let start = offset.min(content_len);
    let end = if length > 0 {
        // saturating_add: offset/length come from the query string, so
        // `start + length` could overflow u64 (a panic in debug builds).
        start.saturating_add(length).min(content_len)
    } else {
        content_len
    };
    // start <= end <= content_len always holds here, so indexing is safe;
    // start == content_len yields the empty slice the original special-cased.
    &content[start as usize..end as usize]
}
|
||||
|
||||
/// Helper function to handle item not found errors
|
||||
fn handle_item_error(error: CoreError) -> StatusCode {
|
||||
match error {
|
||||
CoreError::ItemNotFound(_) | CoreError::ItemNotFoundGeneric => StatusCode::NOT_FOUND,
|
||||
_ => {
|
||||
warn!("Failed to get item: {}", error);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to create AsyncItemService from AppState.
///
/// Clones the handles it needs out of `state` (data dir, DB, inner item
/// service, command, settings) — presumably cheap shared handles, but
/// confirm the cost of `cmd`/`settings` clones against their definitions.
fn create_item_service(state: &AppState) -> AsyncItemService {
    AsyncItemService::new(
        state.data_dir.clone(),
        state.db.clone(),
        state.item_service.clone(),
        state.cmd.clone(),
        state.settings.clone(),
    )
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/item/",
|
||||
operation_id = "keep_list_items",
|
||||
summary = "List stored items",
|
||||
description = "Get paginated items with metadata and tags. Filter by tags, sort by creation time.",
|
||||
responses(
|
||||
(status = 200, description = "Items retrieved", body = ItemInfoListResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
params(
|
||||
("tags" = Option<String>, Query, description = "Comma-separated tags to filter"),
|
||||
("order" = Option<String>, Query, description = "Sort order: 'newest' or 'oldest'"),
|
||||
("start" = Option<u64>, Query, description = "Pagination start index"),
|
||||
("count" = Option<u64>, Query, description = "Number of items to return")
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "item"
|
||||
)]
|
||||
pub async fn handle_list_items(
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<ListItemsQuery>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
let tags: Vec<String> = params
|
||||
.tags
|
||||
.as_ref()
|
||||
.map(|s| s.split(',').map(|t| t.trim().to_string()).collect())
|
||||
.unwrap_or_default();
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
let mut items_with_meta = item_service
|
||||
.list_items(tags, HashMap::new())
|
||||
.await
|
||||
.map_err(|e| {
|
||||
warn!("Failed to get items: {}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Apply ordering (default is newest first)
|
||||
match params.order.as_deref().unwrap_or("newest") {
|
||||
"newest" => items_with_meta.sort_by(|a, b| b.item.ts.cmp(&a.item.ts)),
|
||||
"oldest" => items_with_meta.sort_by(|a, b| a.item.ts.cmp(&b.item.ts)),
|
||||
_ => items_with_meta.sort_by(|a, b| b.item.ts.cmp(&a.item.ts)), // default to newest
|
||||
}
|
||||
|
||||
// Apply pagination
|
||||
let start = params.start.unwrap_or(0) as usize;
|
||||
let count = params.count.unwrap_or(100) as usize;
|
||||
let items_with_meta: Vec<_> = items_with_meta
|
||||
.into_iter()
|
||||
.skip(start)
|
||||
.take(count)
|
||||
.collect();
|
||||
|
||||
let item_infos: Vec<ItemInfo> = items_with_meta
|
||||
.into_iter()
|
||||
.map(|item_with_meta| {
|
||||
let item_id = item_with_meta.item.id.unwrap_or(0);
|
||||
let item_tags: Vec<String> =
|
||||
item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let item_meta = item_with_meta.meta_as_map();
|
||||
|
||||
ItemInfo {
|
||||
id: item_id,
|
||||
ts: item_with_meta.item.ts.to_rfc3339(),
|
||||
size: item_with_meta.item.size,
|
||||
compression: item_with_meta.item.compression,
|
||||
tags: item_tags,
|
||||
metadata: item_meta,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
ResponseBuilder::json(ApiResponse {
|
||||
success: true,
|
||||
data: Some(item_infos),
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Handle as_meta=true response by returning JSON with metadata and content
|
||||
async fn handle_as_meta_response(
|
||||
item_service: &AsyncItemService,
|
||||
item_id: i64,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
) -> Result<Response, StatusCode> {
|
||||
// Get the item with metadata
|
||||
let item_with_meta = item_service.get_item(item_id).await.map_err(|e| {
|
||||
warn!("Failed to get item {} for as_meta content: {}", item_id, e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
handle_as_meta_response_with_metadata(item_service, item_id, &metadata, offset, length).await
|
||||
}
|
||||
|
||||
/// Handle as_meta=true response with pre-fetched metadata
|
||||
async fn handle_as_meta_response_with_metadata(
|
||||
item_service: &AsyncItemService,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
) -> Result<Response, StatusCode> {
|
||||
// Check if content is binary
|
||||
let is_binary = is_content_binary(item_service, item_id, metadata).await?;
|
||||
|
||||
// Get the content if it's not binary
|
||||
if is_binary {
|
||||
// Return JSON with content as None and error message
|
||||
let response_body = serde_json::json!({
|
||||
"metadata": metadata,
|
||||
"content": serde_json::Value::Null,
|
||||
"error": "Content is binary"
|
||||
});
|
||||
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, "application/json")
|
||||
.status(StatusCode::UNPROCESSABLE_ENTITY)
|
||||
.body(axum::body::Body::from(response_body.to_string()))
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
|
||||
} else {
|
||||
// Get the content as text
|
||||
match item_service.get_item_content_info(item_id, None).await {
|
||||
Ok((content, _, _)) => {
|
||||
// Apply offset and length
|
||||
let content_len = content.len() as u64;
|
||||
let start = std::cmp::min(offset, content_len);
|
||||
let end = if length > 0 {
|
||||
std::cmp::min(start + length, content_len)
|
||||
} else {
|
||||
content_len
|
||||
};
|
||||
|
||||
let response_content = if start < content_len {
|
||||
&content[start as usize..end as usize]
|
||||
} else {
|
||||
&[]
|
||||
};
|
||||
|
||||
// Convert to UTF-8 string
|
||||
let content_str = match String::from_utf8(response_content.to_vec()) {
|
||||
Ok(s) => s,
|
||||
Err(_) => {
|
||||
// This shouldn't happen since we checked is_binary, but handle it just in case
|
||||
let response_body = serde_json::json!({
|
||||
"metadata": metadata,
|
||||
"content": serde_json::Value::Null,
|
||||
"error": "Content is not valid UTF-8"
|
||||
});
|
||||
|
||||
let response = Response::builder()
|
||||
.header(header::CONTENT_TYPE, "application/json")
|
||||
.status(StatusCode::UNPROCESSABLE_ENTITY)
|
||||
.body(axum::body::Body::from(response_body.to_string()))
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
return Ok(response);
|
||||
}
|
||||
};
|
||||
|
||||
// Return JSON with metadata and content
|
||||
let response_body = serde_json::json!({
|
||||
"metadata": metadata,
|
||||
"content": content_str,
|
||||
"error": serde_json::Value::Null
|
||||
});
|
||||
|
||||
Response::builder()
|
||||
.header(header::CONTENT_TYPE, "application/json")
|
||||
.body(axum::body::Body::from(response_body.to_string()))
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Failed to get content for item {}: {}", item_id, e);
|
||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/api/item/",
|
||||
operation_id = "keep_post_item",
|
||||
summary = "Store new item",
|
||||
description = "Upload content to store as a new item. Content is compressed, analyzed for metadata, and stored.",
|
||||
responses(
|
||||
(status = 201, description = "Item created", body = ItemInfoResponse),
|
||||
(status = 400, description = "Bad request"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
request_body(
|
||||
content = String,
|
||||
description = "Content to store",
|
||||
content_type = "application/octet-stream"
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "item"
|
||||
)]
|
||||
pub async fn handle_post_item(
|
||||
State(_state): State<AppState>,
|
||||
) -> Result<Json<ApiResponse<ItemInfo>>, StatusCode> {
|
||||
// This is a simplified implementation
|
||||
// In a real implementation, you'd need to properly parse multipart/form-data
|
||||
// or JSON payload with the item data
|
||||
|
||||
let response = ApiResponse::<ItemInfo> {
|
||||
success: false,
|
||||
data: None,
|
||||
error: Some("POST /api/item/ not yet implemented".to_string()),
|
||||
};
|
||||
|
||||
Ok(Json(response))
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/item/latest/content",
|
||||
operation_id = "keep_get_item_latest_content",
|
||||
summary = "Download latest item content",
|
||||
description = "Get raw content of the most recent item. Filter by tags. Binary content can be restricted. \
|
||||
AI agents should use as_meta=true to get content and metadata in a structured JSON format.",
|
||||
responses(
|
||||
(status = 200, description = "Content retrieved"),
|
||||
(status = 400, description = "Binary content not allowed"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Item not found"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
params(
|
||||
("tags" = Option<String>, Query, description = "Tags to filter latest item"),
|
||||
("allow_binary" = Option<bool>, Query, description = "Allow binary content"),
|
||||
("offset" = Option<u64>, Query, description = "Byte offset to start reading"),
|
||||
("length" = Option<u64>, Query, description = "Number of bytes to read"),
|
||||
("stream" = Option<bool>, Query, description = "Stream response (true) or build in memory (false)"),
|
||||
("as_meta" = Option<bool>, Query, description = "Return content and metadata in JSON format (recommended for AI agents)")
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "item"
|
||||
)]
|
||||
pub async fn handle_get_item_latest_content(
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<ItemContentQuery>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
let tags: Vec<String> = params
|
||||
.tags
|
||||
.as_ref()
|
||||
.map(|s| s.split(',').map(|t| t.trim().to_string()).collect())
|
||||
.unwrap_or_default();
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
|
||||
// First find the item to get its ID and metadata
|
||||
let item_with_meta = item_service.find_item(vec![], tags, HashMap::new()).await;
|
||||
|
||||
match item_with_meta {
|
||||
Ok(item) => {
|
||||
let item_id = item.item.id.unwrap();
|
||||
let metadata = item.meta_as_map();
|
||||
// Handle as_meta parameter
|
||||
if params.as_meta {
|
||||
// Force stream=false and allow_binary=false for as_meta=true
|
||||
handle_as_meta_response_with_metadata(
|
||||
&item_service,
|
||||
item_id,
|
||||
&metadata,
|
||||
params.offset,
|
||||
params.length,
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
stream_item_content_response_with_metadata(
|
||||
&item_service,
|
||||
item_id,
|
||||
&metadata,
|
||||
params.allow_binary,
|
||||
params.offset,
|
||||
params.length,
|
||||
params.stream,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
Err(CoreError::ItemNotFoundGeneric) => Err(StatusCode::NOT_FOUND),
|
||||
Err(e) => {
|
||||
warn!("Failed to find latest item for content: {}", e);
|
||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/item/{item_id}/content",
|
||||
operation_id = "keep_get_item_content",
|
||||
summary = "Download item content",
|
||||
description = "Get raw content of a specific item by ID. Binary content can be restricted. \
|
||||
AI agents should use as_meta=true to get content and metadata in a structured JSON format.",
|
||||
responses(
|
||||
(status = 200, description = "Content retrieved"),
|
||||
(status = 400, description = "Invalid ID or binary not allowed"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Item not found"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
params(
|
||||
("item_id" = i64, Path, description = "Item ID"),
|
||||
("allow_binary" = Option<bool>, Query, description = "Allow binary content"),
|
||||
("offset" = Option<u64>, Query, description = "Byte offset to start reading"),
|
||||
("length" = Option<u64>, Query, description = "Number of bytes to read"),
|
||||
("stream" = Option<bool>, Query, description = "Stream response (true) or build in memory (false)"),
|
||||
("as_meta" = Option<bool>, Query, description = "Return content and metadata in JSON format (recommended for AI agents)")
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "item"
|
||||
)]
|
||||
pub async fn handle_get_item_content(
|
||||
State(state): State<AppState>,
|
||||
Path(item_id): Path<i64>,
|
||||
Query(params): Query<ItemQuery>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
// Validate that item ID is positive to prevent path traversal issues
|
||||
if item_id <= 0 {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
|
||||
debug!(
|
||||
"ITEM_API: Getting content for item {} with stream={}, allow_binary={}, offset={}, length={}",
|
||||
item_id, params.stream, params.allow_binary, params.offset, params.length
|
||||
);
|
||||
|
||||
let filter = build_filter_string(¶ms);
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
// Handle as_meta parameter
|
||||
if params.as_meta {
|
||||
// Force stream=false and allow_binary=false for as_meta=true
|
||||
let result =
|
||||
handle_as_meta_response(&item_service, item_id, params.offset, params.length).await;
|
||||
if let Ok(response) = &result {
|
||||
debug!(
|
||||
"ITEM_API: Response content-length: {:?}",
|
||||
response.headers().get("content-length")
|
||||
);
|
||||
}
|
||||
result
|
||||
} else {
|
||||
let result = stream_item_content_response(
|
||||
&item_service,
|
||||
item_id,
|
||||
params.allow_binary,
|
||||
params.offset,
|
||||
params.length,
|
||||
params.stream,
|
||||
filter,
|
||||
)
|
||||
.await;
|
||||
if let Ok(response) = &result {
|
||||
debug!(
|
||||
"ITEM_API: Response content-length: {:?}",
|
||||
response.headers().get("content-length")
|
||||
);
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
async fn stream_item_content_response(
|
||||
item_service: &AsyncItemService,
|
||||
item_id: i64,
|
||||
allow_binary: bool,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
stream: bool,
|
||||
filter: Option<String>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
debug!("STREAM_ITEM_CONTENT_RESPONSE: stream={}", stream);
|
||||
// Get the item with metadata once
|
||||
let item_with_meta = item_service.get_item(item_id).await.map_err(|e| {
|
||||
warn!("Failed to get item {} for content: {}", item_id, e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
stream_item_content_response_with_metadata(
|
||||
item_service,
|
||||
item_id,
|
||||
&metadata,
|
||||
allow_binary,
|
||||
offset,
|
||||
length,
|
||||
stream,
|
||||
filter,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Builds the HTTP response for an item's content, given already-fetched
/// metadata.
///
/// With `stream = true` the body is streamed directly from the service; with
/// `stream = false` the whole content is loaded into memory and
/// `offset`/`length` are applied there. Binary-content rejection is delegated
/// to `check_binary_content_allowed` before either path runs.
///
/// # Errors
///
/// Propagates the status from the binary check, and returns
/// `500 Internal Server Error` when the service fails to provide content.
async fn stream_item_content_response_with_metadata(
    item_service: &AsyncItemService,
    item_id: i64,
    metadata: &HashMap<String, String>,
    allow_binary: bool,
    offset: u64,
    length: u64,
    stream: bool,
    filter: Option<String>,
) -> Result<Response, StatusCode> {
    debug!(
        "STREAM_ITEM_CONTENT_RESPONSE_WITH_METADATA: stream={}",
        stream
    );
    // Content-Type for the response is derived from the item's metadata.
    let mime_type = get_mime_type(metadata);

    // Check if content is binary when allow_binary is false
    check_binary_content_allowed(item_service, item_id, metadata, allow_binary).await?;

    if stream {
        debug!("STREAMING: Using streaming approach");
        // NOTE(review): the literal `true` presumably means "binary already
        // allowed", since the binary check has just passed above — confirm
        // against the service's parameter list.
        // offset/length are applied by the service in this branch.
        match item_service
            .stream_item_content_by_id_with_metadata(
                item_id, metadata, true, offset, length, filter,
            )
            .await
        {
            Ok((stream, _)) => {
                // Wrap the byte stream as the response body; no content-length
                // is set, so the response is chunked.
                let body = axum::body::Body::from_stream(stream);
                let response = Response::builder()
                    .header(header::CONTENT_TYPE, mime_type)
                    .body(body)
                    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
                Ok(response)
            }
            Err(e) => {
                warn!("Failed to stream content for item {}: {}", item_id, e);
                Err(StatusCode::INTERNAL_SERVER_ERROR)
            }
        }
    } else {
        debug!("NON-STREAMING: Building full response in memory");
        match item_service.get_item_content_info(item_id, filter).await {
            Ok((content, _, _)) => {
                // In-memory path: slice the full content ourselves.
                let response_content = apply_offset_length(&content, offset, length);

                debug!(
                    "NON-STREAMING: Content length: {}, response length: {}",
                    content.len(),
                    response_content.len()
                );

                ResponseBuilder::binary(response_content, &mime_type)
            }
            Err(e) => {
                warn!("Failed to get content for item {}: {}", item_id, e);
                Err(StatusCode::INTERNAL_SERVER_ERROR)
            }
        }
    }
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/item/latest/meta",
|
||||
operation_id = "keep_get_item_latest_meta",
|
||||
summary = "Get latest item metadata",
|
||||
description = "Retrieve metadata for the most recent item. Filter by tags.",
|
||||
responses(
|
||||
(status = 200, description = "Metadata retrieved", body = MetadataResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Item not found"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
params(
|
||||
("tags" = Option<String>, Query, description = "Tags to filter latest item")
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "item"
|
||||
)]
|
||||
pub async fn handle_get_item_latest_meta(
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<TagsQuery>,
|
||||
) -> Result<Json<ApiResponse<HashMap<String, String>>>, StatusCode> {
|
||||
let tags: Vec<String> = params
|
||||
.tags
|
||||
.as_ref()
|
||||
.map(|s| s.split(',').map(|t| t.trim().to_string()).collect())
|
||||
.unwrap_or_default();
|
||||
|
||||
let item_service = create_item_service(&state);
|
||||
|
||||
match item_service.find_item(vec![], tags, HashMap::new()).await {
|
||||
Ok(item_with_meta) => {
|
||||
let item_meta = item_with_meta.meta_as_map();
|
||||
|
||||
let response = ApiResponse {
|
||||
success: true,
|
||||
data: Some(item_meta),
|
||||
error: None,
|
||||
};
|
||||
|
||||
Ok(Json(response))
|
||||
}
|
||||
Err(e) => Err(handle_item_error(e)),
|
||||
}
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/item/{item_id}/meta",
|
||||
operation_id = "keep_get_item_meta",
|
||||
summary = "Get item metadata",
|
||||
description = "Retrieve metadata for a specific item by ID.",
|
||||
responses(
|
||||
(status = 200, description = "Metadata retrieved", body = MetadataResponse),
|
||||
(status = 400, description = "Invalid ID"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Item not found"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
params(
|
||||
("item_id" = i64, Path, description = "Item ID")
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "item"
|
||||
)]
|
||||
pub async fn handle_get_item_meta(
|
||||
State(state): State<AppState>,
|
||||
Path(item_id): Path<i64>,
|
||||
) -> Result<Json<ApiResponse<HashMap<String, String>>>, StatusCode> {
|
||||
let item_service = create_item_service(&state);
|
||||
|
||||
match item_service.get_item(item_id).await {
|
||||
Ok(item_with_meta) => {
|
||||
let item_meta = item_with_meta.meta_as_map();
|
||||
|
||||
let response = ApiResponse {
|
||||
success: true,
|
||||
data: Some(item_meta),
|
||||
error: None,
|
||||
};
|
||||
|
||||
Ok(Json(response))
|
||||
}
|
||||
Err(e) => Err(handle_item_error(e)),
|
||||
}
|
||||
}
|
||||
72
src/modes/server/api/mcp.rs
Normal file
72
src/modes/server/api/mcp.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
use axum::{
|
||||
extract::State,
|
||||
http::StatusCode,
|
||||
response::sse::{Event, KeepAlive, Sse},
|
||||
};
|
||||
use futures::stream::{self, Stream};
|
||||
use log::{debug, info};
|
||||
use std::convert::Infallible;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::modes::server::common::AppState;
|
||||
use crate::modes::server::mcp::KeepMcpServer;
|
||||
|
||||
#[utoipa::path(
    get,
    path = "/mcp/sse",
    operation_id = "mcp_sse",
    summary = "MCP SSE endpoint",
    description = "Server-Sent Events for Model Context Protocol. Enables AI tools to interact with Keep's storage and retrieval functions.",
    responses(
        (status = 200, description = "SSE stream established"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error")
    ),
    security(
        ("bearerAuth" = [])
    ),
    tag = "mcp"
)]
/// Axum handler for `GET /mcp/sse`: opens a Server-Sent Events stream.
///
/// Currently a placeholder transport: it sends one initial "connected"
/// message and thereafter emits `keep-alive` events every 30 seconds. The
/// constructed `KeepMcpServer` is not yet wired into the stream.
pub async fn handle_mcp_sse(
    State(state): State<AppState>,
) -> Result<Sse<impl Stream<Item = Result<Event, Infallible>>>, StatusCode> {
    debug!("MCP: Starting SSE endpoint");

    // Constructed but deliberately unused until the rmcp transport is wired up.
    let _mcp_server = KeepMcpServer::new(state);

    // Create a simple message channel for SSE communication
    let (tx, rx) = tokio::sync::mpsc::unbounded_channel::<String>();

    // Send initial connection message
    let _ = tx.send("data: {\"type\":\"connection\",\"status\":\"connected\"}\n\n".to_string());

    // For now, create a simple stream that sends periodic keep-alive messages
    // In a full implementation, this would integrate with the rmcp transport layer
    // NOTE: `tx` is moved into the unfold state on purpose — it keeps the
    // channel open, so `rx.recv()` never resolves to `None` while this stream
    // is alive and the stream never terminates on its own.
    let stream = stream::unfold((rx, tx), |(mut rx, tx)| async move {
        tokio::select! {
            msg = rx.recv() => {
                match msg {
                    Some(data) => {
                        let event = Event::default().data(data);
                        Some((Ok(event), (rx, tx)))
                    }
                    None => None,
                }
            }
            // No message within 30 seconds: emit an explicit keep-alive event.
            _ = tokio::time::sleep(Duration::from_secs(30)) => {
                let event = Event::default()
                    .event("keep-alive")
                    .data("ping");
                Some((Ok(event), (rx, tx)))
            }
        }
    });

    info!("MCP: SSE endpoint established");

    // Axum's own keep-alive is layered on top of the manual one above.
    Ok(Sse::new(stream).keep_alive(
        KeepAlive::new()
            .interval(Duration::from_secs(30))
            .text("keep-alive"),
    ))
}
|
||||
97
src/modes/server/api/mod.rs
Normal file
97
src/modes/server/api/mod.rs
Normal file
@@ -0,0 +1,97 @@
|
||||
#[cfg(feature = "swagger")]
|
||||
pub mod item;
|
||||
#[cfg(feature = "mcp")]
|
||||
pub mod mcp;
|
||||
pub mod status;
|
||||
|
||||
use axum::{Router, routing::get};
|
||||
|
||||
use crate::modes::server::common::AppState;
|
||||
use utoipa::OpenApi;
|
||||
|
||||
#[cfg(feature = "swagger")]
|
||||
use utoipa_swagger_ui::SwaggerUi;
|
||||
|
||||
// OpenAPI document for the Keep REST API. Served at /openapi.json (and via
// the Swagger UI) by `add_docs_routes` when the "swagger" feature is enabled.
//
// NOTE(review): `paths(...)` references handlers in the `item` module, which
// this file gates behind `#[cfg(feature = "swagger")]` — confirm `ApiDoc`
// itself is only compiled when that feature is on, otherwise these paths
// will not resolve.
#[derive(OpenApi)]
#[openapi(
    info(
        title = "Keep API",
        version = "0.1.0",
        description = "REST API for Keep - a tool to manage temporary files with automatic compression and metadata generation",
        contact(
            name = "Keep Project",
        )
    ),
    paths(
        status::handle_status,
        item::handle_list_items,
        item::handle_post_item,
        item::handle_get_item_latest_meta,
        item::handle_get_item_latest_content,
        item::handle_get_item_meta,
        item::handle_get_item_content,
    ),
    components(
        schemas(
            crate::modes::server::common::ItemInfo,
            crate::modes::server::common::ItemContentInfo,
            crate::modes::server::common::ItemInfoListResponse,
            crate::modes::server::common::ItemInfoResponse,
            crate::modes::server::common::ItemContentInfoResponse,
            crate::modes::server::common::MetadataResponse,
            crate::modes::server::common::StatusInfoResponse,
            crate::common::status::StatusInfo,
            crate::modes::server::common::ItemQuery,
            crate::modes::server::common::ItemContentQuery,
        )
    ),
    tags(
        (name = "status", description = "System status and health check endpoints"),
        (name = "item", description = "Item management endpoints for storing, retrieving, and managing content with metadata"),
    ),
    servers(
        (url = "/", description = "Local server")
    )
)]
struct ApiDoc;
|
||||
|
||||
pub fn add_routes(router: Router<AppState>) -> Router<AppState> {
|
||||
let router = router
|
||||
// Status endpoints
|
||||
.route("/api/status", get(status::handle_status))
|
||||
// Item endpoints
|
||||
.route(
|
||||
"/api/item/",
|
||||
get(item::handle_list_items).post(item::handle_post_item),
|
||||
)
|
||||
.route(
|
||||
"/api/item/latest/meta",
|
||||
get(item::handle_get_item_latest_meta),
|
||||
)
|
||||
.route(
|
||||
"/api/item/latest/content",
|
||||
get(item::handle_get_item_latest_content),
|
||||
)
|
||||
.route("/api/item/{item_id}/meta", get(item::handle_get_item_meta))
|
||||
.route(
|
||||
"/api/item/{item_id}/content",
|
||||
get(item::handle_get_item_content),
|
||||
);
|
||||
|
||||
#[cfg(feature = "mcp")]
|
||||
{
|
||||
router = router.route("/mcp/sse", get(mcp::handle_mcp_sse));
|
||||
}
|
||||
|
||||
router
|
||||
}
|
||||
|
||||
#[cfg(feature = "swagger")]
/// Mounts the Swagger UI at `/swagger` and the OpenAPI spec at
/// `/openapi.json` (swagger feature enabled).
pub fn add_docs_routes(router: Router<AppState>) -> Router<AppState> {
    router.merge(SwaggerUi::new("/swagger").url("/openapi.json", ApiDoc::openapi()))
}
|
||||
|
||||
#[cfg(not(feature = "swagger"))]
/// No-op counterpart when the "swagger" feature is disabled: returns the
/// router unchanged so callers need no feature-gated call sites.
pub fn add_docs_routes(router: Router<AppState>) -> Router<AppState> {
    router
}
|
||||
77
src/modes/server/api/status.rs
Normal file
77
src/modes/server/api/status.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
use axum::{extract::State, http::StatusCode, response::Json};
|
||||
|
||||
use crate::modes::server::common::{AppState, StatusInfoResponse};
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/api/status",
|
||||
operation_id = "keep_status",
|
||||
summary = "Get system status",
|
||||
description = "Retrieve system status including database info, storage paths, compression engines, and metadata plugins.",
|
||||
responses(
|
||||
(status = 200, description = "System status retrieved", body = StatusInfoResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 500, description = "Internal server error")
|
||||
),
|
||||
security(
|
||||
("bearerAuth" = [])
|
||||
),
|
||||
tag = "status"
|
||||
)]
|
||||
/// Axum handler for the /api/status GET endpoint.
|
||||
///
|
||||
/// Generates and returns comprehensive system status using the StatusService.
|
||||
/// Includes paths, plugins, compression info, and configuration details.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `State(state)` - The shared AppState containing settings, DB, and paths.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Ok(Json<StatusInfoResponse>)` - Success response with status data.
|
||||
/// * `Err(StatusCode)` - HTTP error status (e.g., 500 for internal errors; 401 if auth fails elsewhere).
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns StatusCode::INTERNAL_SERVER_ERROR if status generation panics or fails (current impl assumes success).
|
||||
/// Auth errors are handled by middleware before reaching this handler.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// // In an Axum app:
|
||||
/// async fn app() -> Result<Json<StatusInfoResponse>, StatusCode> {
|
||||
/// handle_status(State(app_state)).await
|
||||
/// }
|
||||
/// ```
|
||||
pub async fn handle_status(
|
||||
State(state): State<AppState>,
|
||||
) -> Result<Json<StatusInfoResponse>, StatusCode> {
|
||||
// Get database path
|
||||
let db_path = state
|
||||
.db
|
||||
.lock()
|
||||
.await
|
||||
.path()
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
// Use the status service to generate status info showing configured plugins
|
||||
let status_service = crate::services::status_service::StatusService::new();
|
||||
let mut cmd = state.cmd.lock().await;
|
||||
let status_info = status_service.generate_status(
|
||||
&mut cmd,
|
||||
&state.settings,
|
||||
state.data_dir.clone(),
|
||||
db_path.into(),
|
||||
);
|
||||
|
||||
let response = StatusInfoResponse {
|
||||
success: true,
|
||||
data: Some(status_info),
|
||||
error: None,
|
||||
};
|
||||
|
||||
Ok(Json(response))
|
||||
}
|
||||
@@ -1,94 +1,816 @@
|
||||
use crate::services::item_service::ItemService;
|
||||
/// Common utilities and types for the server module.
|
||||
///
|
||||
/// This module provides shared structures, functions, and middleware used across
|
||||
/// different parts of the server implementation, including configuration, state
|
||||
/// management, API responses, authentication, and logging.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::{ServerConfig, AppState};
|
||||
/// let config = ServerConfig { address: "127.0.0.1".to_string(), ..Default::default() };
|
||||
/// let state = AppState { /* ... */ };
|
||||
/// ```
|
||||
use anyhow::Result;
|
||||
use axum::http::HeaderMap;
|
||||
use log::info;
|
||||
use axum::{
|
||||
extract::{ConnectInfo, Request},
|
||||
http::{HeaderMap, StatusCode},
|
||||
middleware::Next,
|
||||
response::Response,
|
||||
};
|
||||
use base64::Engine;
|
||||
use log::{info, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::net::SocketAddr;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::sync::Mutex;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::Args;
|
||||
|
||||
/// Server configuration structure.
|
||||
///
|
||||
/// This struct holds the configuration parameters for the HTTP server, including
|
||||
/// binding address, port, and authentication settings.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let config = ServerConfig {
|
||||
/// address: "127.0.0.1".to_string(),
|
||||
/// port: Some(8080),
|
||||
/// password: None,
|
||||
/// password_hash: None,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Debug, Clone)]
pub struct ServerConfig {
    /// Server bind address.
    ///
    /// The IP address or hostname the server should bind to. Defaults to "127.0.0.1"
    /// for local-only access.
    pub address: String,
    /// Optional server port.
    ///
    /// The TCP port number to listen on. If not specified, a default port (typically
    /// 8080 or 21080) will be used.
    pub port: Option<u16>,
    /// Optional authentication password.
    ///
    /// Plain text password for basic or bearer token authentication. This should be
    /// used only for testing or low-security environments.
    pub password: Option<String>,
    /// Optional hashed authentication password.
    ///
    /// Pre-hashed password (Unix crypt format) for secure authentication. Preferred
    /// over plain text password for production use.
    /// NOTE(review): precedence between `password` and `password_hash` when both
    /// are set is not visible here — confirm in the auth middleware.
    pub password_hash: Option<String>,
}
|
||||
|
||||
impl FromStr for ServerConfig {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Ok(ServerConfig {
|
||||
address: s.to_string(),
|
||||
password: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Application state shared across all routes.
|
||||
///
|
||||
/// This struct encapsulates the shared state that is accessible to all request handlers,
|
||||
/// including database connections, file paths, services, and configuration.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::AppState;
|
||||
/// use std::sync::Arc;
|
||||
/// use tokio::sync::Mutex;
|
||||
/// let state = AppState {
|
||||
/// db: Arc::new(Mutex::new(conn)),
|
||||
/// data_dir: PathBuf::from("/data"),
|
||||
/// item_service: Arc::new(ItemService::new(data_dir.clone())),
|
||||
/// cmd: Arc::new(Mutex::new(Command::new("keep"))),
|
||||
/// settings: Arc::new(settings),
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Clone)]
pub struct AppState {
    /// Database connection wrapped in Arc<Mutex>.
    ///
    /// A thread-safe reference to the SQLite database connection, protected by a mutex
    /// for concurrent access safety.
    pub db: Arc<Mutex<rusqlite::Connection>>,
    /// Data directory path.
    ///
    /// The root directory where item files are stored.
    pub data_dir: PathBuf,
    /// Optional bearer-token password for request authentication.
    ///
    /// NOTE(review): appears to mirror `ServerConfig::password` — confirm it is
    /// the value the auth check compares against.
    pub password: Option<String>,
    /// Parsed command-line arguments, shared across handlers.
    pub args: Arc<Args>,
    /// Item service instance.
    ///
    /// Shared reference to the service handling item storage and retrieval operations.
    pub item_service: Arc<ItemService>,
    /// Command line argument parser.
    ///
    /// Thread-safe reference to the Clap command builder for configuration access.
    pub cmd: Arc<Mutex<clap::Command>>,
    /// Application settings.
    ///
    /// Shared reference to the application's configuration settings.
    pub settings: Arc<crate::config::Settings>,
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
/// Standard API response wrapper containing success status, data payload, and error information.
|
||||
///
|
||||
/// This generic type is used for all API responses to provide a consistent structure across
|
||||
/// different endpoints.
|
||||
///
|
||||
/// # Type Parameters
|
||||
///
|
||||
/// * `T` - The type of the data payload.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::ApiResponse;
|
||||
/// let response: ApiResponse<Vec<ItemInfo>> = ApiResponse {
|
||||
/// success: true,
|
||||
/// data: Some(items),
|
||||
/// error: None,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Debug, Serialize, Deserialize, ToSchema)]
#[schema(
    description = "Standard API response wrapper containing success status, data payload, and error information"
)]
pub struct ApiResponse<T> {
    /// Success indicator.
    ///
    /// Boolean flag indicating whether the request was successful.
    pub success: bool,
    /// Optional data payload.
    ///
    /// The actual response data, present only if the request was successful.
    /// Serialized as `null` when absent (serde's default for `Option`).
    pub data: Option<T>,
    /// Optional error message.
    ///
    /// Error description, present only if the request failed.
    pub error: Option<String>,
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
/// Response type for list of item information.
|
||||
///
|
||||
/// Specialized response for endpoints that return multiple items.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::ItemInfoListResponse;
|
||||
/// let response = ItemInfoListResponse {
|
||||
/// success: true,
|
||||
/// data: Some(vec![item_info]),
|
||||
/// error: None,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Serialize, Deserialize, ToSchema)]
pub struct ItemInfoListResponse {
    /// Success indicator.
    ///
    /// Boolean flag indicating whether the request was successful.
    pub success: bool,
    /// Optional list of item information.
    ///
    /// Vector of `ItemInfo` structures containing details about each item.
    /// Present only on success; mirrors `ApiResponse<Vec<ItemInfo>>` as a
    /// concrete type for OpenAPI schema generation.
    pub data: Option<Vec<ItemInfo>>,
    /// Optional error message.
    ///
    /// Error description if the request failed.
    pub error: Option<String>,
}
|
||||
|
||||
/// Response type for single item information.
|
||||
///
|
||||
/// Specialized response for endpoints that return a single item's details.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::ItemInfoResponse;
|
||||
/// let response = ItemInfoResponse {
|
||||
/// success: true,
|
||||
/// data: Some(item_info),
|
||||
/// error: None,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Serialize, Deserialize, ToSchema)]
pub struct ItemInfoResponse {
    /// Success indicator.
    ///
    /// Boolean flag indicating whether the request was successful.
    pub success: bool,
    /// Optional item information.
    ///
    /// The `ItemInfo` structure containing details about the item. Present
    /// only on success; concrete counterpart of `ApiResponse<ItemInfo>` for
    /// OpenAPI schema generation.
    pub data: Option<ItemInfo>,
    /// Optional error message.
    ///
    /// Error description if the request failed.
    pub error: Option<String>,
}
|
||||
|
||||
/// Response type for item content information.
|
||||
///
|
||||
/// Specialized response for endpoints that return item content and related metadata.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::ItemContentInfoResponse;
|
||||
/// let response = ItemContentInfoResponse {
|
||||
/// success: true,
|
||||
/// data: Some(content_info),
|
||||
/// error: None,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Serialize, Deserialize, ToSchema)]
pub struct ItemContentInfoResponse {
    /// Success indicator.
    ///
    /// Boolean flag indicating whether the request was successful.
    pub success: bool,
    /// Optional item content information.
    ///
    /// The `ItemContentInfo` structure containing content and metadata.
    /// Present only on success.
    pub data: Option<ItemContentInfo>,
    /// Optional error message.
    ///
    /// Error description if the request failed.
    pub error: Option<String>,
}
|
||||
|
||||
/// Response type for metadata.
|
||||
///
|
||||
/// Specialized response for metadata-only endpoints.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::MetadataResponse;
|
||||
/// let response = MetadataResponse {
|
||||
/// success: true,
|
||||
/// data: Some(meta_map),
|
||||
/// error: None,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Serialize, Deserialize, ToSchema)]
pub struct MetadataResponse {
    /// Success indicator.
    ///
    /// Boolean flag indicating whether the request was successful.
    pub success: bool,
    /// Optional metadata hashmap.
    ///
    /// HashMap containing key-value pairs of metadata. Present only on
    /// success; concrete counterpart of `ApiResponse<HashMap<String, String>>`
    /// for OpenAPI schema generation.
    pub data: Option<HashMap<String, String>>,
    /// Optional error message.
    ///
    /// Error description if the request failed.
    pub error: Option<String>,
}
|
||||
|
||||
/// Response type for status information.
|
||||
///
|
||||
/// Specialized response for system status endpoints.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::StatusInfoResponse;
|
||||
/// let response = StatusInfoResponse {
|
||||
/// success: true,
|
||||
/// data: Some(status_info),
|
||||
/// error: None,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Serialize, Deserialize, ToSchema)]
pub struct StatusInfoResponse {
    /// Success indicator.
    ///
    /// Boolean flag indicating whether the request was successful.
    pub success: bool,
    /// Optional status information.
    ///
    /// The `StatusInfo` structure containing system status details. Present
    /// only on success.
    pub data: Option<crate::common::status::StatusInfo>,
    /// Optional error message.
    ///
    /// Error description if the request failed.
    pub error: Option<String>,
}
|
||||
|
||||
/// Complete information about a stored item including metadata and tags.
|
||||
///
|
||||
/// This structure represents the full details of an item, combining basic item
|
||||
/// properties with associated tags and metadata.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::ItemInfo;
|
||||
/// use std::collections::HashMap;
|
||||
/// let item_info = ItemInfo {
|
||||
/// id: 42,
|
||||
/// ts: "2023-12-01T15:30:45Z".to_string(),
|
||||
/// size: Some(1024),
|
||||
/// compression: "gzip".to_string(),
|
||||
/// tags: vec!["important".to_string()],
|
||||
/// metadata: HashMap::from([("mime_type".to_string(), "text/plain".to_string())]),
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Serialize, Deserialize, ToSchema)]
#[schema(description = "Complete information about a stored item including metadata and tags")]
pub struct ItemInfo {
    /// Item ID.
    ///
    /// The unique identifier for the item in the database.
    #[schema(example = 42)]
    pub id: i64,
    /// Timestamp.
    ///
    /// The creation timestamp of the item in ISO 8601 format (per the schema
    /// example; confirm the producer formats it this way).
    #[schema(example = "2023-12-01T15:30:45Z")]
    pub ts: String,
    /// Size in bytes.
    ///
    /// The size of the item's content in bytes, may be None if not set.
    #[schema(example = 1024)]
    pub size: Option<i64>,
    /// Compression type.
    ///
    /// The compression algorithm used for the item's content.
    #[schema(example = "gzip")]
    pub compression: String,
    /// List of tags.
    ///
    /// Vector of strings representing the tags associated with the item.
    #[schema(example = json!(["important", "work", "document"]))]
    pub tags: Vec<String>,
    /// Metadata hashmap.
    ///
    /// Key-value pairs containing additional metadata about the item.
    #[schema(example = json!({"mime_type": "text/plain", "mime_encoding": "utf-8", "line_count": "42"}))]
    pub metadata: HashMap<String, String>,
}
|
||||
|
||||
/// Item information including content and metadata, with binary detection.
|
||||
///
|
||||
/// This structure provides item details along with its content, handling binary
|
||||
/// content detection and safe string representation.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::ItemContentInfo;
|
||||
/// use std::collections::HashMap;
|
||||
/// let content_info = ItemContentInfo {
|
||||
/// metadata: HashMap::from([("mime_type".to_string(), "text/plain".to_string())]),
|
||||
/// content: Some("Hello, world!".to_string()),
|
||||
/// binary: false,
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Serialize, Deserialize, ToSchema)]
#[schema(description = "Item information including content and metadata, with binary detection")]
pub struct ItemContentInfo {
    /// Metadata hashmap.
    ///
    /// Key-value pairs of metadata, flattened into the structure — each
    /// metadata key appears as a top-level JSON field of the serialized value.
    #[serde(flatten)]
    #[schema(example = json!({"mime_type": "text/plain", "mime_encoding": "utf-8", "line_count": "42"}))]
    pub metadata: HashMap<String, String>,
    /// Optional content as string.
    ///
    /// The item's content as a string, only present if the content is text.
    #[schema(example = "Hello, world!\nThis is the content of the file.")]
    pub content: Option<String>,
    /// Binary content indicator.
    ///
    /// Boolean flag indicating whether the content is binary (true) or text (false).
    /// When true, `content` is expected to be absent — confirm with the producer.
    #[schema(example = false)]
    pub binary: bool,
}
|
||||
|
||||
/// Query parameters for tags.
|
||||
///
|
||||
/// Structure for handling tag-based query parameters in API requests.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::TagsQuery;
|
||||
/// let query = TagsQuery { tags: Some("tag1,tag2".to_string()) };
|
||||
/// ```
|
||||
#[derive(Debug, Deserialize)]
pub struct TagsQuery {
    /// Optional comma-separated tags.
    ///
    /// String containing comma-separated tag names for filtering. Handlers
    /// split on ',' and trim whitespace around each tag.
    pub tags: Option<String>,
}
|
||||
|
||||
pub fn check_auth(headers: &HeaderMap, password: &Option<String>) -> bool {
|
||||
if let Some(expected_password) = password {
|
||||
if let Some(auth_header) = headers.get("authorization") {
|
||||
if let Ok(auth_str) = auth_header.to_str() {
|
||||
return auth_str.starts_with("Bearer ") && &auth_str[7..] == expected_password;
|
||||
/// Query parameters for listing items.
|
||||
///
|
||||
/// Structure for pagination and sorting parameters in item listing endpoints.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use keep::modes::server::common::ListItemsQuery;
|
||||
/// let query = ListItemsQuery {
|
||||
/// tags: Some("important".to_string()),
|
||||
/// order: Some("newest".to_string()),
|
||||
/// start: Some(0),
|
||||
/// count: Some(10),
|
||||
/// };
|
||||
/// ```
|
||||
#[derive(Debug, Deserialize)]
pub struct ListItemsQuery {
    /// Optional comma-separated tags for filtering.
    ///
    /// String containing tags to filter the item list.
    pub tags: Option<String>,
    /// Optional sort order.
    ///
    /// String specifying sort direction: "newest" or "oldest".
    /// NOTE(review): behavior for other values is decided by the handler —
    /// confirm whether it defaults or rejects.
    pub order: Option<String>,
    /// Optional pagination start index.
    ///
    /// Unsigned integer indicating the starting index for pagination.
    pub start: Option<u32>,
    /// Optional number of items to return.
    ///
    /// Unsigned integer limiting the number of items returned.
    pub count: Option<u32>,
}
|
||||
|
||||
/// Query parameters for item retrieval.
///
/// Content retrieval parameters, including binary handling and streaming
/// options. All fields are optional on the wire; serde substitutes the
/// defaults produced by the `default_*` helper functions below.
///
/// # Examples
///
/// ```rust
/// use keep::modes::server::common::ItemQuery;
/// let query = ItemQuery {
///     allow_binary: true,
///     offset: 0,
///     length: 1024,
///     stream: false,
///     as_meta: false,
/// };
/// ```
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ItemQuery {
    /// Allow binary content in responses (default: true).
    #[serde(default = "default_allow_binary")]
    pub allow_binary: bool,
    /// Starting byte position for content retrieval (default: 0).
    #[serde(default)]
    pub offset: u64,
    /// Maximum number of bytes to retrieve; 0 means all remaining (default: 0).
    #[serde(default)]
    pub length: u64,
    /// Enable streaming responses for large content (default: false).
    #[serde(default = "default_stream")]
    pub stream: bool,
    /// Return content and metadata as structured JSON (default: false).
    #[serde(default = "default_as_meta")]
    pub as_meta: bool,
}
|
||||
|
||||
/// Query parameters for item content retrieval.
///
/// Extension of [`ItemQuery`] with tag-based item selection; used by
/// content-specific endpoints. Defaults mirror `ItemQuery` exactly.
///
/// # Examples
///
/// ```rust
/// use keep::modes::server::common::ItemContentQuery;
/// let query = ItemContentQuery {
///     tags: Some("important".to_string()),
///     allow_binary: true,
///     offset: 0,
///     length: 1024,
///     stream: false,
///     as_meta: false,
/// };
/// ```
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ItemContentQuery {
    /// Optional comma-separated tags used to select the item.
    pub tags: Option<String>,
    /// Allow binary content in responses (default: true).
    #[serde(default = "default_allow_binary")]
    pub allow_binary: bool,
    /// Starting byte position for content retrieval (default: 0).
    #[serde(default)]
    pub offset: u64,
    /// Maximum number of bytes to retrieve; 0 means all remaining (default: 0).
    #[serde(default)]
    pub length: u64,
    /// Enable streaming responses for large content (default: false).
    #[serde(default = "default_stream")]
    pub stream: bool,
    /// Return content and metadata as structured JSON (default: false).
    #[serde(default = "default_as_meta")]
    pub as_meta: bool,
}
|
||||
|
||||
/// Serde default for `allow_binary`: binary content is permitted unless the
/// client explicitly opts out.
fn default_allow_binary() -> bool {
    true
}
|
||||
|
||||
/// Serde default for `stream`: responses are buffered, not streamed, unless
/// the client asks for streaming.
fn default_stream() -> bool {
    false
}
|
||||
|
||||
/// Serde default for `as_meta`: plain content is returned rather than the
/// structured metadata JSON form.
fn default_as_meta() -> bool {
    false
}
|
||||
|
||||
/// Validates bearer authentication token.
|
||||
///
|
||||
/// This function checks if the provided authorization string is a valid Bearer token
|
||||
/// matching the expected password or hash.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `auth_str` - The authorization string from the header.
|
||||
/// * `expected_password` - The expected plain text password.
|
||||
/// * `expected_hash` - Optional expected password hash.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `true` - If authentication succeeds.
|
||||
/// * `false` - Otherwise.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// None; returns false on failure.
|
||||
fn check_bearer_auth(
|
||||
auth_str: &str,
|
||||
expected_password: &str,
|
||||
expected_hash: &Option<String>,
|
||||
) -> bool {
|
||||
if !auth_str.starts_with("Bearer ") {
|
||||
return false;
|
||||
}
|
||||
|
||||
let provided_password = &auth_str[7..];
|
||||
|
||||
// If we have a password hash, verify against it
|
||||
if let Some(hash) = expected_hash {
|
||||
return pwhash::unix::verify(provided_password, hash);
|
||||
}
|
||||
|
||||
// Otherwise, do direct comparison
|
||||
provided_password == expected_password
|
||||
}
|
||||
|
||||
/// Validates basic authentication credentials.
|
||||
///
|
||||
/// This function decodes and validates Basic Auth credentials from the authorization
|
||||
/// header against the expected password or hash.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `auth_str` - The authorization string from the header.
|
||||
/// * `expected_password` - The expected plain text password.
|
||||
/// * `expected_hash` - Optional expected password hash.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `true` - If authentication succeeds.
|
||||
/// * `false` - Otherwise.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns false on decode or validation failure.
|
||||
fn check_basic_auth(
|
||||
auth_str: &str,
|
||||
expected_password: &str,
|
||||
expected_hash: &Option<String>,
|
||||
) -> bool {
|
||||
if !auth_str.starts_with("Basic ") {
|
||||
return false;
|
||||
}
|
||||
|
||||
let encoded = &auth_str[6..];
|
||||
if let Ok(decoded_bytes) = base64::engine::general_purpose::STANDARD.decode(encoded) {
|
||||
if let Ok(decoded_str) = String::from_utf8(decoded_bytes) {
|
||||
if let Some(colon_pos) = decoded_str.find(':') {
|
||||
let provided_password = &decoded_str[colon_pos + 1..];
|
||||
|
||||
// If we have a password hash, verify against it
|
||||
if let Some(hash) = expected_hash {
|
||||
return pwhash::unix::verify(provided_password, hash);
|
||||
}
|
||||
|
||||
// Otherwise, do direct comparison
|
||||
let expected_credentials = format!("keep:{}", expected_password);
|
||||
return decoded_str == expected_credentials;
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
} else {
|
||||
true // No password required
|
||||
}
|
||||
}
|
||||
|
||||
// Custom middleware for logging requests and responses
|
||||
/// Checks authorization header for valid credentials.
|
||||
///
|
||||
/// This function inspects the HTTP Authorization header for valid Bearer or Basic
|
||||
/// authentication credentials against the provided password or hash.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `headers` - HTTP headers from the request.
|
||||
/// * `password` - Optional expected password.
|
||||
/// * `password_hash` - Optional expected password hash.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `true` - If authorized (or no auth required).
|
||||
/// * `false` - If unauthorized.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// if check_auth(&headers, &Some("pass".to_string()), &None) {
|
||||
/// // Proceed
|
||||
/// }
|
||||
/// ```
|
||||
pub fn check_auth(
|
||||
headers: &HeaderMap,
|
||||
password: &Option<String>,
|
||||
password_hash: &Option<String>,
|
||||
) -> bool {
|
||||
// If neither password nor hash is set, no authentication required
|
||||
if password.is_none() && password_hash.is_none() {
|
||||
return true;
|
||||
}
|
||||
|
||||
if let Some(auth_header) = headers.get("authorization") {
|
||||
if let Ok(auth_str) = auth_header.to_str() {
|
||||
return check_bearer_auth(auth_str, password.as_deref().unwrap_or(""), password_hash)
|
||||
|| check_basic_auth(auth_str, password.as_deref().unwrap_or(""), password_hash);
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
/// Middleware for logging requests and responses.
|
||||
///
|
||||
/// This middleware logs incoming requests and outgoing responses, including method,
|
||||
/// URI, status code, response size, duration, and Accept header.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `ConnectInfo(addr)` - Connection info with client address.
|
||||
/// * `request` - Incoming request.
|
||||
/// * `next` - Next middleware.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// The processed response with logging.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Used in Axum router: `.layer_handler(logging_middleware)`.
|
||||
pub async fn logging_middleware(
|
||||
req: axum::http::Request<axum::body::Body>,
|
||||
next: axum::middleware::Next,
|
||||
) -> Result<axum::http::Response<axum::body::Body>, axum::response::Response> {
|
||||
let method = req.method().clone();
|
||||
let uri = req.uri().clone();
|
||||
let headers = req.headers().clone();
|
||||
ConnectInfo(addr): ConnectInfo<SocketAddr>,
|
||||
request: Request,
|
||||
next: Next,
|
||||
) -> Response {
|
||||
let method = request.method().clone();
|
||||
let uri = request.uri().clone();
|
||||
|
||||
// Log incoming request
|
||||
info!("SERVER: {} {} - Headers: {:?}", method, uri, headers);
|
||||
// Log the Accept header - extract before moving the request
|
||||
let accept_header = request
|
||||
.headers()
|
||||
.get("accept")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("-")
|
||||
.to_string();
|
||||
|
||||
let start = Instant::now();
|
||||
let response = next.run(req).await;
|
||||
let response = next.run(request).await;
|
||||
let duration = start.elapsed();
|
||||
|
||||
// Log response
|
||||
info!("SERVER: {} {} - Status: {} - Duration: {:?}", method, uri, response.status(), duration);
|
||||
// Try to get response body size from content-length header, or default to 0
|
||||
let response_content_length = response
|
||||
.headers()
|
||||
.get("content-length")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|s| s.parse::<u64>().ok())
|
||||
.unwrap_or(0);
|
||||
|
||||
Ok(response)
|
||||
info!(
|
||||
"{} {} {} {} {} bytes - {:?} - Accept: {}",
|
||||
addr,
|
||||
method,
|
||||
uri,
|
||||
response.status(),
|
||||
response_content_length,
|
||||
duration,
|
||||
accept_header
|
||||
);
|
||||
|
||||
response
|
||||
}
|
||||
|
||||
/// Creates authentication middleware for the application.
|
||||
///
|
||||
/// This function returns a middleware that enforces authentication on protected routes
|
||||
/// using Bearer token or Basic Auth, challenging unauthorized requests with appropriate
|
||||
/// headers.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `password` - Optional plain text password.
|
||||
/// * `password_hash` - Optional hashed password.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A clonable async middleware function for Axum.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let auth_middleware = create_auth_middleware(Some("pass".to_string()), None);
|
||||
/// router.layer(auth_middleware);
|
||||
/// ```
|
||||
pub fn create_auth_middleware(
|
||||
password: Option<String>,
|
||||
password_hash: Option<String>,
|
||||
) -> impl Fn(
|
||||
ConnectInfo<SocketAddr>,
|
||||
Request,
|
||||
Next,
|
||||
)
|
||||
-> std::pin::Pin<Box<dyn std::future::Future<Output = Result<Response, StatusCode>> + Send>>
|
||||
+ Clone
|
||||
+ Send {
|
||||
move |ConnectInfo(addr): ConnectInfo<SocketAddr>, request: Request, next: Next| {
|
||||
let password = password.clone();
|
||||
let password_hash = password_hash.clone();
|
||||
Box::pin(async move {
|
||||
let headers = request.headers().clone();
|
||||
let uri = request.uri().clone();
|
||||
|
||||
if !check_auth(&headers, &password, &password_hash) {
|
||||
warn!("Unauthorized request to {} from {}", uri, addr);
|
||||
// Add WWW-Authenticate header to trigger basic auth in browsers
|
||||
let mut response = Response::new(axum::body::Body::from("Unauthorized"));
|
||||
*response.status_mut() = StatusCode::UNAUTHORIZED;
|
||||
response.headers_mut().insert(
|
||||
"www-authenticate",
|
||||
"Basic realm=\"Keep Server\", charset=\"UTF-8\""
|
||||
.parse()
|
||||
.unwrap(),
|
||||
);
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
let response = next.run(request).await;
|
||||
Ok(response)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,181 +0,0 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use axum::{
|
||||
extract::{ConnectInfo, Path, Query, State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
response::Json,
|
||||
};
|
||||
use log::warn;
|
||||
use serde_json::json;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Read;
|
||||
use std::net::SocketAddr;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::compression_engine::{CompressionType, get_compression_engine};
|
||||
use crate::db;
|
||||
use super::common::{AppState, ApiResponse, TagsQuery, check_auth};
|
||||
|
||||
/// GET /content — returns the decompressed content of the latest item.
///
/// When `tags` is supplied, the most recent item matching those tags is used
/// instead of the overall latest item.
///
/// # Errors
///
/// * `401 UNAUTHORIZED` - when `check_auth` rejects the request.
/// * `404 NOT_FOUND` - when no matching item exists.
/// * `500 INTERNAL_SERVER_ERROR` - on database failures; content read
///   failures are reported inside a `success: false` JSON body instead.
pub async fn handle_get_content_latest(
    State(state): State<AppState>,
    Query(params): Query<TagsQuery>,
    headers: HeaderMap,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> Result<Json<ApiResponse<String>>, StatusCode> {
    if !check_auth(&headers, &state.password) {
        warn!("Unauthorized request to /content from {}", addr);
        return Err(StatusCode::UNAUTHORIZED);
    }

    // Serialize DB access behind the shared async lock.
    let mut conn = state.db.lock().await;

    let item = if let Some(tags_str) = params.tags {
        // Tags arrive comma-separated; whitespace around each tag is ignored.
        let tags: Vec<String> = tags_str.split(',').map(|t| t.trim().to_string()).collect();
        db::get_item_matching(&mut *conn, &tags, &HashMap::new())
            .map_err(|e| {
                warn!("Failed to get item matching tags {:?} for content: {}", tags, e);
                StatusCode::INTERNAL_SERVER_ERROR
            })?
    } else {
        db::get_item_last(&mut *conn).map_err(|e| {
            warn!("Failed to get last item for content: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?
    };

    if let Some(item) = item {
        match get_item_content(&item, &state.data_dir).await {
            Ok(content) => {
                let response = ApiResponse {
                    success: true,
                    data: Some(content),
                    error: None,
                };
                Ok(Json(response))
            }
            Err(e) => {
                // Content-read failures stay a 200 with success=false so the
                // client can see the error message.
                warn!("Failed to get content for item {}: {}", item.id.unwrap_or(0), e);
                let response = ApiResponse::<String> {
                    success: false,
                    data: None,
                    error: Some(format!("Failed to retrieve content: {}", e)),
                };
                Ok(Json(response))
            }
        }
    } else {
        Err(StatusCode::NOT_FOUND)
    }
}
|
||||
|
||||
/// GET /content/{id} — returns the decompressed content of a specific item.
///
/// # Errors
///
/// * `401 UNAUTHORIZED` - when `check_auth` rejects the request.
/// * `400 BAD_REQUEST` - when `item_id` is not a positive integer.
/// * `404 NOT_FOUND` - when no item with that ID exists.
/// * `500 INTERNAL_SERVER_ERROR` - on database failures; content read
///   failures are reported inside a `success: false` JSON body instead.
pub async fn handle_get_content(
    State(state): State<AppState>,
    Path(item_id): Path<String>,
    headers: HeaderMap,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> Result<Json<ApiResponse<String>>, StatusCode> {
    if !check_auth(&headers, &state.password) {
        warn!("Unauthorized request to /content/{} from {}", item_id, addr);
        return Err(StatusCode::UNAUTHORIZED);
    }

    if let Ok(id) = item_id.parse::<i64>() {
        // Validate that item ID is positive to prevent path traversal issues
        if id <= 0 {
            warn!("Invalid item ID {} from {}", id, addr);
            return Err(StatusCode::BAD_REQUEST);
        }

        let mut conn = state.db.lock().await;

        if let Some(item) = db::get_item(&mut *conn, id).map_err(|e| {
            warn!("Failed to get item {} for content: {}", id, e);
            StatusCode::INTERNAL_SERVER_ERROR
        })? {
            match get_item_content(&item, &state.data_dir).await {
                Ok(content) => {
                    let response = ApiResponse {
                        success: true,
                        data: Some(content),
                        error: None,
                    };
                    Ok(Json(response))
                }
                Err(e) => {
                    // Content-read failures stay a 200 with success=false so
                    // the client can see the error message.
                    warn!("Failed to get content for item {}: {}", id, e);
                    let response = ApiResponse::<String> {
                        success: false,
                        data: None,
                        error: Some(format!("Failed to retrieve content: {}", e)),
                    };
                    Ok(Json(response))
                }
            }
        } else {
            Err(StatusCode::NOT_FOUND)
        }
    } else {
        Err(StatusCode::BAD_REQUEST)
    }
}
|
||||
|
||||
async fn get_item_content(item: &db::Item, data_dir: &PathBuf) -> Result<String> {
|
||||
let item_id = item.id.ok_or_else(|| anyhow!("Item missing ID"))?;
|
||||
|
||||
// Validate that item ID is positive to prevent path traversal issues
|
||||
if item_id <= 0 {
|
||||
return Err(anyhow!("Invalid item ID: {}", item_id));
|
||||
}
|
||||
|
||||
let mut item_path = data_dir.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
|
||||
let compression_type = CompressionType::from_str(&item.compression)?;
|
||||
let compression_engine = get_compression_engine(compression_type)?;
|
||||
|
||||
// Read the content using the compression engine
|
||||
let mut reader = compression_engine.open(item_path)?;
|
||||
let mut content = String::new();
|
||||
reader.read_to_string(&mut content)?;
|
||||
|
||||
Ok(content)
|
||||
}
|
||||
|
||||
/// Returns the OpenAPI path objects for the `/content` endpoints.
///
/// The result is merged into the full specification by `handle_openapi`;
/// keys are URL paths and values are OpenAPI operation objects.
pub fn get_content_openapi_spec() -> serde_json::Value {
    json!({
        "/content": {
            "get": {
                "summary": "Get content of latest item",
                "parameters": [
                    {
                        "name": "tags",
                        "in": "query",
                        "schema": {"type": "string"},
                        "description": "Comma-separated list of tags to filter by"
                    }
                ],
                "responses": {
                    "200": {"description": "Item content"},
                    "404": {"description": "No items found"}
                }
            }
        },
        "/content/{id}": {
            "get": {
                "summary": "Get content by item ID",
                "parameters": [
                    {
                        "name": "id",
                        "in": "path",
                        "required": true,
                        "schema": {"type": "integer"}
                    }
                ],
                "responses": {
                    "200": {"description": "Item content"},
                    "404": {"description": "Item not found"}
                }
            }
        }
    })
}
|
||||
@@ -1,110 +0,0 @@
|
||||
use axum::response::{Html, Json};
|
||||
use serde_json::json;
|
||||
|
||||
use super::status::get_status_openapi_spec;
|
||||
use super::items::get_items_openapi_spec;
|
||||
use super::content::get_content_openapi_spec;
|
||||
|
||||
pub async fn handle_openapi() -> Json<serde_json::Value> {
|
||||
let mut paths = json!({});
|
||||
|
||||
// Merge all endpoint specifications
|
||||
let status_paths = get_status_openapi_spec();
|
||||
let items_paths = get_items_openapi_spec();
|
||||
let content_paths = get_content_openapi_spec();
|
||||
|
||||
// Merge the path objects
|
||||
if let serde_json::Value::Object(ref mut paths_map) = paths {
|
||||
if let serde_json::Value::Object(status_map) = status_paths {
|
||||
for (key, value) in status_map {
|
||||
paths_map.insert(key, value);
|
||||
}
|
||||
}
|
||||
if let serde_json::Value::Object(items_map) = items_paths {
|
||||
for (key, value) in items_map {
|
||||
paths_map.insert(key, value);
|
||||
}
|
||||
}
|
||||
if let serde_json::Value::Object(content_map) = content_paths {
|
||||
for (key, value) in content_map {
|
||||
paths_map.insert(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let openapi_spec = json!({
|
||||
"openapi": "3.0.0",
|
||||
"info": {
|
||||
"title": "Keep API",
|
||||
"version": "1.0.0",
|
||||
"description": "REST API for the Keep data storage system"
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "/",
|
||||
"description": "Local server"
|
||||
}
|
||||
],
|
||||
"components": {
|
||||
"securitySchemes": {
|
||||
"bearerAuth": {
|
||||
"type": "http",
|
||||
"scheme": "bearer"
|
||||
}
|
||||
},
|
||||
"schemas": {
|
||||
"ItemInfo": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {"type": "integer"},
|
||||
"ts": {"type": "string", "format": "date-time"},
|
||||
"size": {"type": "integer", "nullable": true},
|
||||
"compression": {"type": "string"},
|
||||
"tags": {"type": "array", "items": {"type": "string"}},
|
||||
"metadata": {"type": "object"}
|
||||
}
|
||||
},
|
||||
"StatusInfo": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"version": {"type": "string"},
|
||||
"database_path": {"type": "string"},
|
||||
"data_directory": {"type": "string"},
|
||||
"compression_engines": {"type": "array", "items": {"type": "string"}},
|
||||
"meta_plugins": {"type": "array", "items": {"type": "string"}}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [{"bearerAuth": []}],
|
||||
"paths": paths
|
||||
});
|
||||
|
||||
Json(openapi_spec)
|
||||
}
|
||||
|
||||
/// GET /docs — serves the Swagger UI page.
///
/// Returns a static HTML shell that loads Swagger UI assets from unpkg.com
/// and points it at this server's `/openapi.json`.
/// NOTE(review): rendering requires internet access to the CDN — confirm
/// this is acceptable for offline deployments.
pub async fn handle_swagger_ui() -> Html<&'static str> {
    let html = r#"<!DOCTYPE html>
<html>
<head>
    <title>Keep API Documentation</title>
    <link rel="stylesheet" type="text/css" href="https://unpkg.com/swagger-ui-dist@3.52.5/swagger-ui.css" />
</head>
<body>
    <div id="swagger-ui"></div>
    <script src="https://unpkg.com/swagger-ui-dist@3.52.5/swagger-ui-bundle.js"></script>
    <script>
        SwaggerUIBundle({
            url: '/openapi.json',
            dom_id: '#swagger-ui',
            presets: [
                SwaggerUIBundle.presets.apis,
                SwaggerUIBundle.presets.standalone
            ]
        });
    </script>
</body>
</html>"#;

    Html(html)
}
|
||||
@@ -1,311 +0,0 @@
|
||||
use axum::{
|
||||
extract::{ConnectInfo, Path, Query, State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
response::Json,
|
||||
};
|
||||
use log::warn;
|
||||
use serde_json::json;
|
||||
use std::collections::HashMap;
|
||||
use std::net::SocketAddr;
|
||||
|
||||
use crate::db;
|
||||
use super::common::{AppState, ApiResponse, ItemInfo, TagsQuery, check_auth};
|
||||
|
||||
/// GET /item/ — lists items, optionally filtered by tags.
///
/// Loads all (or tag-matching) items, then batch-fetches their tags and
/// metadata to build the `ItemInfo` response objects.
///
/// # Errors
///
/// * `401 UNAUTHORIZED` - when `check_auth` rejects the request.
/// * `500 INTERNAL_SERVER_ERROR` - on any database failure.
pub async fn handle_list_items(
    State(state): State<AppState>,
    Query(params): Query<TagsQuery>,
    headers: HeaderMap,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> Result<Json<ApiResponse<Vec<ItemInfo>>>, StatusCode> {
    if !check_auth(&headers, &state.password) {
        warn!("Unauthorized request to /item/ from {}", addr);
        return Err(StatusCode::UNAUTHORIZED);
    }

    // Serialize DB access behind the shared async lock.
    let mut conn = state.db.lock().await;

    // Tags arrive comma-separated; absent means "no filter".
    let tags: Vec<String> = params.tags
        .map(|s| s.split(',').map(|t| t.trim().to_string()).collect())
        .unwrap_or_default();

    let items = if tags.is_empty() {
        db::get_items(&mut *conn).map_err(|e| {
            warn!("Failed to get items: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?
    } else {
        db::get_items_matching(&mut *conn, &tags, &HashMap::new())
            .map_err(|e| {
                warn!("Failed to get items matching tags {:?}: {}", tags, e);
                StatusCode::INTERNAL_SERVER_ERROR
            })?
    };

    // Get item IDs for batch queries
    let item_ids: Vec<i64> = items.iter().filter_map(|item| item.id).collect();

    // Get tags and metadata for all items in two batch queries instead of
    // one pair of queries per item.
    let tags_map = db::get_tags_for_items(&mut *conn, &item_ids)
        .map_err(|e| {
            warn!("Failed to get tags for items: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    let meta_map = db::get_meta_for_items(&mut *conn, &item_ids)
        .map_err(|e| {
            warn!("Failed to get metadata for items: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;

    let item_infos: Vec<ItemInfo> = items
        .into_iter()
        .map(|item| {
            let item_id = item.id.unwrap_or(0);
            let item_tags = tags_map.get(&item_id)
                .map(|tags| tags.iter().map(|t| t.name.clone()).collect())
                .unwrap_or_default();
            let item_meta = meta_map.get(&item_id)
                .cloned()
                .unwrap_or_default();

            ItemInfo {
                id: item_id,
                ts: item.ts.to_rfc3339(),
                size: item.size,
                compression: item.compression,
                tags: item_tags,
                metadata: item_meta,
            }
        })
        .collect();

    let response = ApiResponse {
        success: true,
        data: Some(item_infos),
        error: None,
    };

    Ok(Json(response))
}
|
||||
|
||||
/// GET /item/{id} — returns a single item's info by numeric ID, or by tags
/// when the path segment is not numeric.
///
/// # Errors
///
/// * `401 UNAUTHORIZED` - when `check_auth` rejects the request.
/// * `400 BAD_REQUEST` - when the ID is non-numeric and no tags were given.
/// * `404 NOT_FOUND` - when no matching item exists.
/// * `500 INTERNAL_SERVER_ERROR` - on any database failure.
pub async fn handle_get_item(
    State(state): State<AppState>,
    Path(item_id): Path<String>,
    Query(params): Query<TagsQuery>,
    headers: HeaderMap,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> Result<Json<ApiResponse<ItemInfo>>, StatusCode> {
    if !check_auth(&headers, &state.password) {
        warn!("Unauthorized request to /item/{} from {}", item_id, addr);
        return Err(StatusCode::UNAUTHORIZED);
    }

    let mut conn = state.db.lock().await;

    let item = if let Ok(id) = item_id.parse::<i64>() {
        db::get_item(&mut *conn, id).map_err(|e| {
            warn!("Failed to get item {}: {}", id, e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?
    } else {
        // Non-numeric path segment: fall back to tag-based lookup.
        if let Some(tags_str) = params.tags {
            let tags: Vec<String> = tags_str.split(',').map(|t| t.trim().to_string()).collect();
            db::get_item_matching(&mut *conn, &tags, &HashMap::new())
                .map_err(|e| {
                    warn!("Failed to get item matching tags {:?}: {}", tags, e);
                    StatusCode::INTERNAL_SERVER_ERROR
                })?
        } else {
            warn!("Invalid item ID '{}' and no tags provided", item_id);
            return Err(StatusCode::BAD_REQUEST);
        }
    };

    if let Some(item) = item {
        // Collect this item's tag names and metadata key/value pairs.
        let item_tags = db::get_item_tags(&mut *conn, &item)
            .map_err(|e| {
                warn!("Failed to get tags for item {}: {}", item.id.unwrap_or(0), e);
                StatusCode::INTERNAL_SERVER_ERROR
            })?
            .into_iter()
            .map(|t| t.name)
            .collect();
        let item_meta = db::get_item_meta(&mut *conn, &item)
            .map_err(|e| {
                warn!("Failed to get metadata for item {}: {}", item.id.unwrap_or(0), e);
                StatusCode::INTERNAL_SERVER_ERROR
            })?
            .into_iter()
            .map(|m| (m.name, m.value))
            .collect();

        let item_info = ItemInfo {
            id: item.id.unwrap_or(0),
            ts: item.ts.to_rfc3339(),
            size: item.size,
            compression: item.compression,
            tags: item_tags,
            metadata: item_meta,
        };

        let response = ApiResponse {
            success: true,
            data: Some(item_info),
            error: None,
        };

        Ok(Json(response))
    } else {
        Err(StatusCode::NOT_FOUND)
    }
}
|
||||
|
||||
/// PUT /item/ — placeholder for item creation (not yet implemented).
///
/// Always returns a `success: false` body with an explanatory error message
/// after passing the authentication check.
pub async fn handle_put_item(
    State(state): State<AppState>,
    headers: HeaderMap,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> Result<Json<ApiResponse<ItemInfo>>, StatusCode> {
    if !check_auth(&headers, &state.password) {
        warn!("Unauthorized request to PUT /item/ from {}", addr);
        return Err(StatusCode::UNAUTHORIZED);
    }

    // This is a simplified implementation
    // In a real implementation, you'd need to properly parse multipart/form-data
    // or JSON payload with the item data

    let response = ApiResponse::<ItemInfo> {
        success: false,
        data: None,
        error: Some("PUT /item/ not yet implemented".to_string()),
    };

    Ok(Json(response))
}
|
||||
|
||||
/// DELETE /item/{id} — deletes an item by numeric ID.
///
/// # Errors
///
/// * `401 UNAUTHORIZED` - when `check_auth` rejects the request.
/// * `400 BAD_REQUEST` - when `item_id` is not a valid integer.
/// * `404 NOT_FOUND` - when no item with that ID exists.
/// * `500 INTERNAL_SERVER_ERROR` - on any database failure.
pub async fn handle_delete_item(
    State(state): State<AppState>,
    Path(item_id): Path<String>,
    headers: HeaderMap,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> Result<Json<ApiResponse<()>>, StatusCode> {
    if !check_auth(&headers, &state.password) {
        warn!("Unauthorized request to DELETE /item/{} from {}", item_id, addr);
        return Err(StatusCode::UNAUTHORIZED);
    }

    if let Ok(id) = item_id.parse::<i64>() {
        let mut conn = state.db.lock().await;

        // Fetch first so a missing item yields 404 rather than a silent no-op.
        if let Some(item) = db::get_item(&mut *conn, id).map_err(|e| {
            warn!("Failed to get item {} for deletion: {}", id, e);
            StatusCode::INTERNAL_SERVER_ERROR
        })? {
            db::delete_item(&mut *conn, item).map_err(|e| {
                warn!("Failed to delete item {}: {}", id, e);
                StatusCode::INTERNAL_SERVER_ERROR
            })?;

            let response = ApiResponse::<()> {
                success: true,
                data: None,
                error: None,
            };
            Ok(Json(response))
        } else {
            Err(StatusCode::NOT_FOUND)
        }
    } else {
        Err(StatusCode::BAD_REQUEST)
    }
}
|
||||
|
||||
/// Returns the OpenAPI path objects for the `/item/` endpoints.
///
/// The result is merged into the full specification by `handle_openapi`;
/// keys are URL paths and values are OpenAPI operation objects.
pub fn get_items_openapi_spec() -> serde_json::Value {
    json!({
        "/item/": {
            "get": {
                "summary": "List items",
                "parameters": [
                    {
                        "name": "tags",
                        "in": "query",
                        "schema": {"type": "string"},
                        "description": "Comma-separated list of tags to filter by"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "List of items",
                        "content": {
                            "application/json": {
                                "schema": {
                                    "type": "array",
                                    "items": {"$ref": "#/components/schemas/ItemInfo"}
                                }
                            }
                        }
                    }
                }
            },
            "put": {
                "summary": "Add new item",
                "responses": {
                    "201": {
                        "description": "Item created",
                        "content": {
                            "application/json": {
                                "schema": {"$ref": "#/components/schemas/ItemInfo"}
                            }
                        }
                    }
                }
            }
        },
        "/item/{id}": {
            "get": {
                "summary": "Get item by ID",
                "parameters": [
                    {
                        "name": "id",
                        "in": "path",
                        "required": true,
                        "schema": {"type": "string"},
                        "description": "Item ID or use tags query parameter"
                    },
                    {
                        "name": "tags",
                        "in": "query",
                        "schema": {"type": "string"},
                        "description": "Comma-separated list of tags (when ID is not numeric)"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "Item information",
                        "content": {
                            "application/json": {
                                "schema": {"$ref": "#/components/schemas/ItemInfo"}
                            }
                        }
                    },
                    "404": {"description": "Item not found"}
                }
            },
            "delete": {
                "summary": "Delete item by ID",
                "parameters": [
                    {
                        "name": "id",
                        "in": "path",
                        "required": true,
                        "schema": {"type": "integer"}
                    }
                ],
                "responses": {
                    "200": {"description": "Item deleted"},
                    "404": {"description": "Item not found"}
                }
            }
        }
    })
}
|
||||
83
src/modes/server/mcp/mod.rs
Normal file
83
src/modes/server/mcp/mod.rs
Normal file
@@ -0,0 +1,83 @@
|
||||
pub mod server;
|
||||
pub mod tools;
|
||||
|
||||
pub use server::KeepMcpServer;
|
||||
|
||||
/// Module for handling MCP (Model Context Protocol) requests in the server.
|
||||
///
|
||||
/// Provides handlers for JSON-RPC style requests to interact with Keep's storage
|
||||
/// via the API.
|
||||
use axum::{Json, extract::State, http::StatusCode, response::IntoResponse};
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::modes::server::common::ApiResponse;
|
||||
use crate::modes::server::common::AppState;
|
||||
|
||||
/// Request structure for MCP JSON-RPC-style calls.
///
/// # Fields
///
/// * `method` - The MCP method name (e.g., "save_item").
/// * `params` - Optional JSON parameters for the method.
#[derive(Deserialize)]
pub struct McpRequest {
    /// The MCP method name to dispatch.
    pub method: String,
    /// Optional JSON parameters forwarded to the method handler.
    pub params: Option<Value>,
}
|
||||
|
||||
/// Handles an MCP request via the Axum framework.
|
||||
///
|
||||
/// Parses the JSON request, delegates to `KeepMcpServer`, and returns an API response.
|
||||
/// Attempts to parse the result as JSON; falls back to string if invalid.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `State(state)` - The application state.
|
||||
/// * `Json(request)` - The deserialized MCP request.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// An `IntoResponse` with status code and JSON API response.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns 400 Bad Request on handler errors.
|
||||
pub async fn handle_mcp_request(
|
||||
State(state): State<AppState>,
|
||||
Json(request): Json<McpRequest>,
|
||||
) -> impl IntoResponse {
|
||||
let mcp_server = KeepMcpServer::new(state);
|
||||
|
||||
match mcp_server
|
||||
.handle_request(&request.method, request.params)
|
||||
.await
|
||||
{
|
||||
Ok(result) => match serde_json::from_str(&result) {
|
||||
Ok(parsed_result) => {
|
||||
let response = ApiResponse {
|
||||
success: true,
|
||||
data: Some(parsed_result),
|
||||
error: None,
|
||||
};
|
||||
(StatusCode::OK, Json(response))
|
||||
}
|
||||
Err(_) => {
|
||||
let response = ApiResponse {
|
||||
success: true,
|
||||
data: Some(serde_json::Value::String(result)),
|
||||
error: None,
|
||||
};
|
||||
(StatusCode::OK, Json(response))
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
let response = ApiResponse {
|
||||
success: false,
|
||||
data: None,
|
||||
error: Some(e.to_string()),
|
||||
};
|
||||
(StatusCode::BAD_REQUEST, Json(response))
|
||||
}
|
||||
}
|
||||
}
|
||||
83
src/modes/server/mcp/server.rs
Normal file
83
src/modes/server/mcp/server.rs
Normal file
@@ -0,0 +1,83 @@
|
||||
use log::debug;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::tools::{KeepTools, ToolError};
|
||||
use crate::modes::server::common::AppState;
|
||||
|
||||
/// Server handler for MCP (Model Context Protocol) requests.
///
/// Routes requests to appropriate tools and handles responses. Clones the
/// shared `AppState` for tool usage.
///
/// # Fields
///
/// * `state` - The shared application state (DB, config, etc.).
#[derive(Clone)]
pub struct KeepMcpServer {
    state: AppState,
}
|
||||
|
||||
/// Creates a new `KeepMcpServer` instance.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `state` - The application state containing DB, config, and services.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `KeepMcpServer` instance.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let server = KeepMcpServer::new(app_state);
|
||||
/// ```
|
||||
impl KeepMcpServer {
|
||||
pub fn new(state: AppState) -> Self {
|
||||
Self { state }
|
||||
}
|
||||
|
||||
/// Handles an MCP request by routing to the appropriate tool.
|
||||
///
|
||||
/// Supports methods like "save_item", "get_item", "list_items". Logs the request and delegates to KeepTools.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `method` - The MCP method name (string).
|
||||
/// * `params` - Optional JSON parameters as serde_json::Value.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `Ok(String)` with JSON-serialized response on success, or `Err(ToolError)` on failure.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * ToolError::UnknownTool if method unsupported.
|
||||
/// * Propagates tool-specific errors (e.g., invalid args, DB failures).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let result = server.handle_request("save_item", Some(params)).await?;
|
||||
/// ```
|
||||
pub async fn handle_request(
|
||||
&self,
|
||||
method: &str,
|
||||
params: Option<Value>,
|
||||
) -> Result<String, ToolError> {
|
||||
debug!(
|
||||
"MCP: Handling request '{}' with params: {:?}",
|
||||
method, params
|
||||
);
|
||||
|
||||
let tools = KeepTools::new(self.state.clone());
|
||||
|
||||
match method {
|
||||
"save_item" => tools.save_item(params).await,
|
||||
"get_item" => tools.get_item(params).await,
|
||||
"get_latest_item" => tools.get_latest_item(params).await,
|
||||
"list_items" => tools.list_items(params).await,
|
||||
"search_items" => tools.search_items(params).await,
|
||||
_ => Err(ToolError::UnknownTool(method.to_string())),
|
||||
}
|
||||
}
|
||||
}
|
||||
344
src/modes/server/mcp/tools.rs
Normal file
344
src/modes/server/mcp/tools.rs
Normal file
@@ -0,0 +1,344 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use log::debug;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::modes::server::common::AppState;
|
||||
use crate::services::async_item_service::AsyncItemService;
|
||||
use crate::services::error::CoreError;
|
||||
|
||||
/// Errors surfaced by MCP tool handlers.
#[derive(Debug, thiserror::Error)]
pub enum ToolError {
    /// The requested MCP method is not implemented.
    #[error("Unknown tool: {0}")]
    UnknownTool(String),
    /// Arguments were missing, malformed, or referenced a nonexistent item.
    #[error("Invalid arguments: {0}")]
    InvalidArguments(String),
    /// SQLite-level failure.
    #[error("Database error: {0}")]
    Database(#[from] rusqlite::Error),
    /// Filesystem or network I/O failure.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// JSON (de)serialization failure.
    #[error("JSON error: {0}")]
    Json(#[from] serde_json::Error),
    /// Enum string parsing failure.
    #[error("Parse error: {0}")]
    Parse(#[from] strum::ParseError),
    /// Catch-all for wrapped service errors.
    #[error("Other error: {0}")]
    Other(#[from] anyhow::Error),
}
|
||||
|
||||
/// Collection of MCP tool implementations backed by the shared `AppState`.
pub struct KeepTools {
    state: AppState,
}
|
||||
|
||||
impl KeepTools {
|
||||
pub fn new(state: AppState) -> Self {
|
||||
Self { state }
|
||||
}
|
||||
|
||||
pub async fn save_item(&self, args: Option<Value>) -> Result<String, ToolError> {
|
||||
let args =
|
||||
args.ok_or_else(|| ToolError::InvalidArguments("Missing arguments".to_string()))?;
|
||||
|
||||
let content = args
|
||||
.get("content")
|
||||
.and_then(|v| v.as_str())
|
||||
.ok_or_else(|| ToolError::InvalidArguments("Missing 'content' field".to_string()))?;
|
||||
|
||||
let tags: Vec<String> = args
|
||||
.get("tags")
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.filter_map(|v| v.as_str().map(|s| s.to_string()))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let metadata: HashMap<String, String> = args
|
||||
.get("metadata")
|
||||
.and_then(|v| v.as_object())
|
||||
.map(|obj| {
|
||||
obj.iter()
|
||||
.filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
debug!(
|
||||
"MCP: Saving item with {} bytes, {} tags, {} metadata entries",
|
||||
content.len(),
|
||||
tags.len(),
|
||||
metadata.len()
|
||||
);
|
||||
|
||||
let service = AsyncItemService::new(
|
||||
self.state.data_dir.clone(),
|
||||
self.state.db.clone(),
|
||||
self.state.item_service.clone(),
|
||||
self.state.cmd.clone(),
|
||||
self.state.settings.clone(),
|
||||
);
|
||||
let item_with_meta = service
|
||||
.save_item_from_mcp(content.as_bytes().to_vec(), tags, metadata)
|
||||
.await
|
||||
.map_err(|e| ToolError::Other(anyhow::Error::from(e)))?;
|
||||
|
||||
let item_id = item_with_meta
|
||||
.item
|
||||
.id
|
||||
.ok_or_else(|| anyhow!("Failed to get item ID"))?;
|
||||
|
||||
Ok(format!("Successfully saved item with ID: {}", item_id))
|
||||
}
|
||||
|
||||
pub async fn get_item(&self, args: Option<Value>) -> Result<String, ToolError> {
|
||||
let args =
|
||||
args.ok_or_else(|| ToolError::InvalidArguments("Missing arguments".to_string()))?;
|
||||
|
||||
let item_id = args.get("id").and_then(|v| v.as_i64()).ok_or_else(|| {
|
||||
ToolError::InvalidArguments("Missing or invalid 'id' field".to_string())
|
||||
})?;
|
||||
|
||||
let service = AsyncItemService::new(
|
||||
self.state.data_dir.clone(),
|
||||
self.state.db.clone(),
|
||||
self.state.item_service.clone(),
|
||||
self.state.cmd.clone(),
|
||||
self.state.settings.clone(),
|
||||
);
|
||||
|
||||
let item_with_content = match service.get_item_content(item_id).await {
|
||||
Ok(iwc) => iwc,
|
||||
Err(CoreError::ItemNotFound(_)) => {
|
||||
return Err(ToolError::InvalidArguments(format!(
|
||||
"Item {} not found",
|
||||
item_id
|
||||
)));
|
||||
}
|
||||
Err(e) => return Err(ToolError::Other(anyhow::Error::from(e))),
|
||||
};
|
||||
|
||||
let content = String::from_utf8_lossy(&item_with_content.content).to_string();
|
||||
let tags: Vec<String> = item_with_content
|
||||
.item_with_meta
|
||||
.tags
|
||||
.iter()
|
||||
.map(|t| t.name.clone())
|
||||
.collect();
|
||||
let metadata = item_with_content.item_with_meta.meta_as_map();
|
||||
let item = item_with_content.item_with_meta.item;
|
||||
|
||||
let response = serde_json::json!({
|
||||
"id": item_id,
|
||||
"content": content,
|
||||
"timestamp": item.ts.to_rfc3339(),
|
||||
"size": item.size,
|
||||
"compression": item.compression,
|
||||
"tags": tags,
|
||||
"metadata": metadata,
|
||||
});
|
||||
|
||||
Ok(serde_json::to_string_pretty(&response)?)
|
||||
}
|
||||
|
||||
pub async fn get_latest_item(&self, args: Option<Value>) -> Result<String, ToolError> {
|
||||
let tags: Vec<String> = args
|
||||
.as_ref()
|
||||
.and_then(|v| v.get("tags"))
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.filter_map(|v| v.as_str().map(|s| s.to_string()))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let service = AsyncItemService::new(
|
||||
self.state.data_dir.clone(),
|
||||
self.state.db.clone(),
|
||||
self.state.item_service.clone(),
|
||||
self.state.cmd.clone(),
|
||||
self.state.settings.clone(),
|
||||
);
|
||||
|
||||
let item_with_meta = match service.find_item(vec![], tags, HashMap::new()).await {
|
||||
Ok(iwm) => iwm,
|
||||
Err(CoreError::ItemNotFoundGeneric) => {
|
||||
return Err(ToolError::InvalidArguments("No items found".to_string()));
|
||||
}
|
||||
Err(e) => return Err(ToolError::Other(anyhow::Error::from(e))),
|
||||
};
|
||||
|
||||
let item_id = item_with_meta
|
||||
.item
|
||||
.id
|
||||
.ok_or_else(|| anyhow!("Item missing ID after find"))?;
|
||||
let item_with_content = service
|
||||
.get_item_content(item_id)
|
||||
.await
|
||||
.map_err(|e| ToolError::Other(anyhow::Error::from(e)))?;
|
||||
|
||||
let content = String::from_utf8_lossy(&item_with_content.content).to_string();
|
||||
let tags: Vec<String> = item_with_content
|
||||
.item_with_meta
|
||||
.tags
|
||||
.iter()
|
||||
.map(|t| t.name.clone())
|
||||
.collect();
|
||||
let metadata = item_with_content.item_with_meta.meta_as_map();
|
||||
let item = item_with_content.item_with_meta.item;
|
||||
|
||||
let response = serde_json::json!({
|
||||
"id": item_id,
|
||||
"content": content,
|
||||
"timestamp": item.ts.to_rfc3339(),
|
||||
"size": item.size,
|
||||
"compression": item.compression,
|
||||
"tags": tags,
|
||||
"metadata": metadata,
|
||||
});
|
||||
|
||||
Ok(serde_json::to_string_pretty(&response)?)
|
||||
}
|
||||
|
||||
pub async fn list_items(&self, args: Option<Value>) -> Result<String, ToolError> {
|
||||
let args_ref = args.as_ref();
|
||||
let tags: Vec<String> = args_ref
|
||||
.and_then(|v| v.get("tags"))
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.filter_map(|v| v.as_str().map(|s| s.to_string()))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let limit = args_ref
|
||||
.and_then(|v| v.get("limit"))
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(10) as usize;
|
||||
|
||||
let offset = args_ref
|
||||
.and_then(|v| v.get("offset"))
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(0) as usize;
|
||||
|
||||
let service = AsyncItemService::new(
|
||||
self.state.data_dir.clone(),
|
||||
self.state.db.clone(),
|
||||
self.state.item_service.clone(),
|
||||
self.state.cmd.clone(),
|
||||
self.state.settings.clone(),
|
||||
);
|
||||
let mut items_with_meta = service
|
||||
.list_items(tags, HashMap::new())
|
||||
.await
|
||||
.map_err(|e| ToolError::Other(anyhow::Error::from(e)))?;
|
||||
|
||||
// Sort by timestamp (newest first) and apply pagination
|
||||
items_with_meta.sort_by(|a, b| b.item.ts.cmp(&a.item.ts));
|
||||
let items_with_meta: Vec<_> = items_with_meta
|
||||
.into_iter()
|
||||
.skip(offset)
|
||||
.take(limit)
|
||||
.collect();
|
||||
|
||||
let items_info: Vec<_> = items_with_meta
|
||||
.into_iter()
|
||||
.map(|item_with_meta| {
|
||||
let item_tags: Vec<String> =
|
||||
item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let item_meta = item_with_meta.meta_as_map();
|
||||
let item = item_with_meta.item;
|
||||
let item_id = item.id.unwrap_or(0);
|
||||
|
||||
serde_json::json!({
|
||||
"id": item_id,
|
||||
"timestamp": item.ts.to_rfc3339(),
|
||||
"size": item.size,
|
||||
"compression": item.compression,
|
||||
"tags": item_tags,
|
||||
"metadata": item_meta
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
let response = serde_json::json!({
|
||||
"items": items_info,
|
||||
"count": items_info.len(),
|
||||
"offset": offset,
|
||||
"limit": limit
|
||||
});
|
||||
|
||||
Ok(serde_json::to_string_pretty(&response)?)
|
||||
}
|
||||
|
||||
pub async fn search_items(&self, args: Option<Value>) -> Result<String, ToolError> {
|
||||
let tags: Vec<String> = args
|
||||
.as_ref()
|
||||
.and_then(|v| v.get("tags"))
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.filter_map(|v| v.as_str().map(|s| s.to_string()))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let metadata: HashMap<String, String> = args
|
||||
.as_ref()
|
||||
.and_then(|v| v.get("metadata"))
|
||||
.and_then(|v| v.as_object())
|
||||
.map(|obj| {
|
||||
obj.iter()
|
||||
.filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let service = AsyncItemService::new(
|
||||
self.state.data_dir.clone(),
|
||||
self.state.db.clone(),
|
||||
self.state.item_service.clone(),
|
||||
self.state.cmd.clone(),
|
||||
self.state.settings.clone(),
|
||||
);
|
||||
let mut items_with_meta = service
|
||||
.list_items(tags.clone(), metadata.clone())
|
||||
.await
|
||||
.map_err(|e| ToolError::Other(anyhow::Error::from(e)))?;
|
||||
|
||||
// Sort by timestamp (newest first)
|
||||
items_with_meta.sort_by(|a, b| b.item.ts.cmp(&a.item.ts));
|
||||
|
||||
let items_info: Vec<_> = items_with_meta
|
||||
.into_iter()
|
||||
.map(|item_with_meta| {
|
||||
let item_tags: Vec<String> =
|
||||
item_with_meta.tags.iter().map(|t| t.name.clone()).collect();
|
||||
let item_meta = item_with_meta.meta_as_map();
|
||||
let item = item_with_meta.item;
|
||||
let item_id = item.id.unwrap_or(0);
|
||||
|
||||
serde_json::json!({
|
||||
"id": item_id,
|
||||
"timestamp": item.ts.to_rfc3339(),
|
||||
"size": item.size,
|
||||
"compression": item.compression,
|
||||
"tags": item_tags,
|
||||
"metadata": item_meta
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
let response = serde_json::json!({
|
||||
"items": items_info,
|
||||
"count": items_info.len(),
|
||||
"search_criteria": {
|
||||
"tags": tags,
|
||||
"metadata": metadata
|
||||
}
|
||||
});
|
||||
|
||||
Ok(serde_json::to_string_pretty(&response)?)
|
||||
}
|
||||
}
|
||||
150
src/modes/server/mod.rs
Normal file
150
src/modes/server/mod.rs
Normal file
@@ -0,0 +1,150 @@
|
||||
use crate::config;
|
||||
use crate::services::item_service::ItemService;
|
||||
use anyhow::Result;
|
||||
use axum::{Router, routing::post};
|
||||
use clap::Command;
|
||||
use log::{debug, info};
|
||||
use std::net::SocketAddr;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use tower::ServiceBuilder;
|
||||
use tower_http::cors::CorsLayer;
|
||||
use tower_http::trace::TraceLayer;
|
||||
|
||||
mod api;
|
||||
pub mod common;
|
||||
#[cfg(feature = "mcp")]
|
||||
mod mcp;
|
||||
mod pages;
|
||||
|
||||
pub use common::{AppState, create_auth_middleware, logging_middleware};
|
||||
|
||||
pub fn mode_server(
|
||||
cmd: &mut Command,
|
||||
settings: &config::Settings,
|
||||
conn: &mut rusqlite::Connection,
|
||||
data_path: PathBuf,
|
||||
) -> Result<()> {
|
||||
// Get server address from args or config with default
|
||||
let server_address = if let Some(addr) = &settings.server_address() {
|
||||
addr.clone()
|
||||
} else if let Some(server_config) = &settings.server {
|
||||
server_config
|
||||
.address
|
||||
.clone()
|
||||
.unwrap_or_else(|| "127.0.0.1".to_string())
|
||||
} else {
|
||||
"127.0.0.1".to_string()
|
||||
};
|
||||
|
||||
// Get server port from args or config with default
|
||||
let server_port = if let Some(port) = settings.server_port() {
|
||||
port
|
||||
} else if let Some(server_config) = &settings.server {
|
||||
server_config.port.unwrap_or(21080)
|
||||
} else {
|
||||
21080
|
||||
};
|
||||
|
||||
let server_config = common::ServerConfig {
|
||||
address: server_address,
|
||||
port: Some(server_port),
|
||||
password: settings.server_password(),
|
||||
password_hash: settings.server_password_hash(),
|
||||
};
|
||||
|
||||
// Create ItemService once
|
||||
let item_service = ItemService::new(data_path.clone());
|
||||
|
||||
// We need to move the connection into the async runtime
|
||||
let rt = tokio::runtime::Runtime::new()?;
|
||||
// Take ownership of the connection and move it into the async runtime
|
||||
let owned_conn = std::mem::replace(conn, rusqlite::Connection::open_in_memory()?);
|
||||
let cmd = cmd.clone();
|
||||
let settings = settings.clone();
|
||||
rt.block_on(run_server(
|
||||
server_config,
|
||||
owned_conn,
|
||||
data_path,
|
||||
item_service,
|
||||
cmd,
|
||||
settings,
|
||||
))
|
||||
}
|
||||
|
||||
/// Runs the Axum HTTP server until it terminates.
///
/// Wires up routing (API, pages, and — behind the "mcp" feature — the MCP
/// endpoint), authentication middleware for protected routes, then logging,
/// tracing, and CORS layers, and serves on the configured address.
///
/// # Arguments
///
/// * `config` - Resolved bind address, port, and auth credentials.
/// * `conn` - The SQLite connection, owned by the server from here on.
/// * `data_dir` - Directory holding item content files.
/// * `item_service` - Pre-built item service shared through `AppState`.
/// * `_cmd` - Accepted but currently unused; see note below.
/// * `settings` - Application settings, cloned into the shared state.
///
/// # Errors
///
/// Returns an error if the bind address cannot be parsed or the listener
/// fails to bind or serve.
async fn run_server(
    config: common::ServerConfig,
    conn: rusqlite::Connection,
    data_dir: PathBuf,
    item_service: ItemService,
    _cmd: Command,
    settings: config::Settings,
) -> Result<()> {
    // Construct address with port (falls back to the default port 21080).
    let bind_address = if let Some(port) = config.port {
        format!("{}:{}", config.address, port)
    } else {
        format!("{}:21080", config.address)
    };

    debug!("SERVER: Starting REST HTTP server on {}", bind_address);

    // Use the existing database connection, shared behind an async mutex.
    let db_conn = Arc::new(Mutex::new(conn));

    let state = AppState {
        db: db_conn,
        data_dir: data_dir.clone(),
        item_service: Arc::new(item_service),
        // NOTE(review): `_cmd` is accepted but ignored; a fresh
        // `Command::new("keep")` is stored instead — confirm intentional.
        cmd: Arc::new(Mutex::new(Command::new("keep"))),
        settings: Arc::new(settings.clone()),
    };

    // MCP endpoint is only compiled in with the "mcp" feature.
    #[cfg(feature = "mcp")]
    let mcp_router = Router::new()
        .route("/mcp", post(mcp::handle_mcp_request))
        .with_state(state.clone());

    // `mut` is only needed when the "mcp" feature re-binds this below.
    let mut protected_router = Router::new()
        .merge(api::add_routes(Router::new()))
        .merge(pages::add_routes(Router::new()));

    #[cfg(feature = "mcp")]
    {
        protected_router = protected_router.merge(mcp_router);
    }

    // Require authentication on every protected route.
    let protected_router = protected_router.layer(axum::middleware::from_fn(
        create_auth_middleware(config.password.clone(), config.password_hash.clone()),
    ));

    // Create the app with documentation routes open and others protected
    let app = Router::new()
        // Add documentation routes without authentication
        .merge(api::add_docs_routes(Router::new()))
        // Add API, pages, and MCP routes with authentication
        .merge(protected_router)
        // Apply state to all routes
        .with_state(state)
        // Add other middleware layers to all routes
        .layer(axum::middleware::from_fn(logging_middleware))
        .layer(
            ServiceBuilder::new()
                .layer(TraceLayer::new_for_http())
                .layer(CorsLayer::permissive()),
        );

    let addr: SocketAddr = bind_address.parse()?;

    info!("SERVER: HTTP server listening on {}", addr);

    let listener = tokio::net::TcpListener::bind(addr).await?;
    axum::serve(
        listener,
        app.into_make_service_with_connect_info::<SocketAddr>(),
    )
    .await?;

    Ok(())
}
|
||||
450
src/modes/server/pages.rs
Normal file
450
src/modes/server/pages.rs
Normal file
@@ -0,0 +1,450 @@
|
||||
use crate::config::ColumnConfig;
|
||||
use crate::db;
|
||||
use crate::modes::server::AppState;
|
||||
use anyhow::Result;
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
response::{Html, Response},
|
||||
};
|
||||
use log::debug;
|
||||
use rusqlite::Connection;
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
/// Query parameters for the item list endpoint.
|
||||
///
|
||||
/// This struct defines the query parameters used to filter, sort, and paginate
|
||||
/// the list of items displayed on the main page.
|
||||
///
|
||||
/// # Fields
|
||||
///
|
||||
/// * `sort` - Sorting order, defaults to "newest".
|
||||
/// * `tags` - Optional comma-separated list of tags to filter by.
|
||||
/// * `count` - Number of items per page, defaults to 1000.
|
||||
/// * `start` - Starting index for pagination, defaults to 0.
|
||||
pub struct ListQueryParams {
|
||||
#[serde(default = "default_sort")]
|
||||
sort: String,
|
||||
#[serde(default)]
|
||||
tags: Option<String>,
|
||||
#[serde(default = "default_count")]
|
||||
count: usize,
|
||||
#[serde(default)]
|
||||
start: usize,
|
||||
}
|
||||
|
||||
/// Returns the default sorting order for item lists: "newest".
fn default_sort() -> String {
    String::from("newest")
}
|
||||
|
||||
/// Provides the default number of items to display per page.
///
/// (The doc comment previously attached here described `default_sort`, and
/// this function's own doc dangled below it, where it would have attached to
/// `add_routes`; both are now placed correctly.)
///
/// # Returns
///
/// The default count: 1000.
fn default_count() -> usize {
    1000
}
|
||||
|
||||
/// Adds the web page routes to the Axum router.
///
/// This function configures the routes for the web interface: the main item
/// list (`/`), individual item details (`/item/{item_id}`), and the static
/// stylesheet (`/style.css`).
///
/// # Arguments
///
/// * `app` - The existing Axum router with application state.
///
/// # Returns
///
/// The updated router with web routes added.
///
/// # Examples
///
/// ```ignore
/// let app = pages::add_routes(axum::Router::new());
/// ```
pub fn add_routes(app: axum::Router<AppState>) -> axum::Router<AppState> {
    app.route("/", axum::routing::get(list_items))
        .route("/item/{item_id}", axum::routing::get(show_item))
        .route("/style.css", axum::routing::get(style_css))
}
|
||||
|
||||
async fn list_items(
|
||||
State(state): State<AppState>,
|
||||
Query(params): Query<ListQueryParams>,
|
||||
) -> Result<Response, Html<String>> {
|
||||
let conn = state.db.lock().await;
|
||||
let settings = &state.settings;
|
||||
|
||||
let result = build_item_list(&conn, ¶ms, &settings.list_format);
|
||||
|
||||
match result {
|
||||
Ok(html) => {
|
||||
// Build response with explicit Content-Length
|
||||
let response = Response::builder()
|
||||
.header("content-type", "text/html")
|
||||
.header("content-length", html.len().to_string())
|
||||
.body(axum::body::Body::from(html))
|
||||
.map_err(|_| Html("<html><body>Internal Server Error</body></html>".to_string()))?;
|
||||
Ok(response)
|
||||
}
|
||||
Err(e) => Err(Html(format!("<html><body>Error: {}</body></html>", e))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Renders the item-list page as a complete HTML document.
///
/// Queries items (optionally filtered by a comma-separated tag list), sorts
/// by id according to `params.sort`, paginates with `start`/`count`, and
/// renders a "Recent Tags" section followed by a table whose columns come
/// from the configured `columns`.
///
/// # Errors
///
/// Propagates database query failures.
fn build_item_list(
    conn: &Connection,
    params: &ListQueryParams,
    columns: &[ColumnConfig],
) -> Result<String> {
    // Split the optional "a,b,c" tag filter into trimmed strings.
    let tags: Vec<String> = params
        .tags
        .as_ref()
        .map(|t| t.split(',').map(|s| s.trim().to_string()).collect())
        .unwrap_or_default();

    let items = if tags.is_empty() {
        db::query_all_items(conn)?
    } else {
        db::query_tagged_items(conn, &tags)?
    };

    // Sort items by id; "newest" means descending, anything else ascending.
    let mut sorted_items = items;
    if params.sort == "newest" {
        sorted_items.sort_by(|a, b| b.id.cmp(&a.id));
    } else {
        sorted_items.sort_by(|a, b| a.id.cmp(&b.id));
    }

    // Apply pagination
    let start = params.start;
    let end = std::cmp::min(start + params.count, sorted_items.len());
    let page_items = if start < sorted_items.len() {
        // The inner min is redundant (end is already clamped) but harmless.
        sorted_items[start..std::cmp::min(end, sorted_items.len())].to_vec()
    } else {
        vec![]
    };

    // Get tags and meta for all items in the page
    let item_ids: Vec<i64> = page_items.iter().filter_map(|item| item.id).collect();
    let tags_map = db::get_tags_for_items(conn, &item_ids)?;
    let meta_map = db::get_meta_for_items(conn, &item_ids)?;

    // Debug: print number of tags per item
    for item_id in &item_ids {
        if let Some(tags) = tags_map.get(item_id) {
            debug!("Item {} has {} tags: {:?}", item_id, tags.len(), tags);
        }
    }

    let mut html = String::new();
    html.push_str("<html><head><title>Keep - Items</title>");
    html.push_str("<link rel=\"stylesheet\" href=\"/style.css\">");
    html.push_str("</head><body>");
    html.push_str("<h1>Items</h1>");
    html.push_str("<p><a href=\"/swagger\">API Documentation</a></p>");

    // Add recent tags section using the items we already have
    html.push_str("<h2>Recent Tags</h2>");

    // Collect all tags from all items, keeping track of their timestamps.
    // Note: tags_map only covers the current page's items, so "recent tags"
    // effectively reflects the current page — confirm this is intended.
    let mut all_tags_with_time: Vec<(String, chrono::DateTime<chrono::Utc>)> = Vec::new();
    for item in &sorted_items {
        if let Some(item_id) = item.id {
            if let Some(tags) = tags_map.get(&item_id) {
                for tag in tags {
                    all_tags_with_time.push((tag.name.clone(), item.ts));
                }
            }
        }
    }

    // Sort by timestamp descending (most recent first)
    all_tags_with_time.sort_by(|a, b| b.1.cmp(&a.1));

    // Get unique tags in order of most recent appearance, capped at 20.
    let mut seen = std::collections::HashSet::new();
    let mut recent_tags = Vec::new();
    for (tag, _) in all_tags_with_time {
        if !seen.contains(&tag) {
            seen.insert(tag.clone());
            recent_tags.push(tag);
            if recent_tags.len() >= 20 {
                break;
            }
        }
    }

    if recent_tags.is_empty() {
        html.push_str("<p>No tags found</p>");
    } else {
        html.push_str("<p>");
        // NOTE(review): tag names are interpolated without HTML/URL escaping
        // — confirm tag values are trusted.
        for tag in recent_tags {
            html.push_str(&format!(
                "<a href=\"/?tags={}\" style=\"margin-right: 8px;\">{}</a>",
                tag, tag
            ));
        }
        html.push_str("</p>");
    }

    // Start table
    html.push_str("<table>");

    // Table headers
    html.push_str("<tr>");
    for column in columns {
        html.push_str(&format!("<th>{}</th>", column.label));
    }
    html.push_str("<th>Actions</th>");
    html.push_str("</tr>");

    // Table rows
    for item in page_items {
        let item_id = item.id.unwrap_or(0);
        let tags = tags_map.get(&item_id).cloned().unwrap_or_default();
        let meta: HashMap<String, String> = meta_map
            .get(&item_id)
            .map(|metas| {
                metas
                    .iter()
                    .map(|(name, value)| (name.clone(), value.clone()))
                    .collect()
            })
            .unwrap_or_default();

        html.push_str("<tr>");
        for column in columns {
            // Render the configured column; "meta:<key>" columns read from
            // the item's metadata map, unknown names render empty.
            let value = match column.name.as_str() {
                "id" => {
                    let id_value = item.id.map(|id| id.to_string()).unwrap_or_default();
                    // Make the ID a link to the item details page
                    format!("<a href=\"/item/{}\">{}</a>", item_id, id_value)
                }
                "time" => item.ts.format("%Y-%m-%d %H:%M:%S").to_string(),
                "size" => item.size.map(|s| s.to_string()).unwrap_or_default(),
                "tags" => {
                    // Make sure we're using all tags for the item
                    let tag_links: Vec<String> = tags
                        .iter()
                        .map(|t| format!("<a href=\"/?tags={}\">{}</a>", t.name, t.name))
                        .collect();
                    tag_links.join(", ")
                }
                _ => {
                    if column.name.starts_with("meta:") {
                        let meta_key = &column.name[5..];
                        meta.get(meta_key).cloned().unwrap_or_default()
                    } else {
                        String::new()
                    }
                }
            };

            // Apply max_len if specified, but skip for tags column to avoid truncating HTML
            let display_value = if column.name == "tags" {
                value
            } else if let Some(max_len_str) = &column.max_len {
                if let Ok(max_len) = max_len_str.parse::<usize>() {
                    if value.chars().count() > max_len {
                        let truncated: String = value.chars().take(max_len).collect();
                        format!("{}...", truncated)
                    } else {
                        value
                    }
                } else {
                    value
                }
            } else {
                value
            };

            // Apply alignment
            let align_style = match column.align {
                crate::config::ColumnAlignment::Left => "text-align: left;",
                crate::config::ColumnAlignment::Right => "text-align: right;",
                crate::config::ColumnAlignment::Center => "text-align: center;",
            };

            html.push_str(&format!(
                "<td style=\"{}\">{}</td>",
                align_style, display_value
            ));
        }

        // Actions column
        html.push_str(&format!(
            "<td><a href=\"/item/{}\">View</a> | <a href=\"/api/item/{}/content\">Download</a></td>",
            item_id, item_id
        ));

        html.push_str("</tr>");
    }

    html.push_str("</table>");

    // Add pagination info
    // NOTE(review): an empty result set renders "Showing 1 to 0 of 0 items"
    // — confirm whether that display is acceptable.
    html.push_str(&format!(
        "<p>Showing {} to {} of {} items</p>",
        start + 1,
        std::cmp::min(end, sorted_items.len()),
        sorted_items.len()
    ));

    html.push_str("</body></html>");

    Ok(html)
}
|
||||
|
||||
/// Serves the shared static stylesheet used by all server-rendered pages.
async fn style_css() -> &'static str {
    r#"
body {
    font-family: Arial, sans-serif;
    margin: 0;
    padding: 20px;
    background-color: #f5f5f5;
}
h1, h2 {
    color: #333;
}
table {
    border-collapse: collapse;
    width: 100%;
    background-color: white;
    box-shadow: 0 1px 3px rgba(0,0,0,0.1);
}
th, td {
    border: 1px solid #ddd;
    padding: 12px;
    text-align: left;
}
th {
    background-color: #f2f2f2;
    position: sticky;
    top: 0;
}
tr:nth-child(even) {
    background-color: #f9f9f9;
}
tr:hover {
    background-color: #f1f1f1;
}
a {
    color: #0066cc;
    text-decoration: none;
}
a:hover {
    text-decoration: underline;
}
.pagination {
    margin: 20px 0;
}
.actions {
    white-space: nowrap;
}
"#
}
|
||||
|
||||
async fn show_item(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<i64>,
|
||||
) -> Result<Response, Html<String>> {
|
||||
let conn = state.db.lock().await;
|
||||
|
||||
let result = build_item_details(&conn, id);
|
||||
|
||||
match result {
|
||||
Ok(html) => {
|
||||
// Build response with explicit Content-Length
|
||||
let response = Response::builder()
|
||||
.header("content-type", "text/html")
|
||||
.header("content-length", html.len().to_string())
|
||||
.body(axum::body::Body::from(html))
|
||||
.map_err(|_| Html("<html><body>Internal Server Error</body></html>".to_string()))?;
|
||||
Ok(response)
|
||||
}
|
||||
Err(e) => Err(Html(format!("<html><body>Error: {}</body></html>", e))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Renders the detail page for one item as a complete HTML document.
///
/// # Arguments
///
/// * `conn` - Open database connection.
/// * `id` - The item's numeric id.
///
/// # Errors
///
/// Returns an error when the item does not exist or a database query fails.
fn build_item_details(conn: &Connection, id: i64) -> Result<String> {
    let item = match db::get_item(conn, id)? {
        Some(item) => item,
        None => return Err(anyhow::anyhow!("Item not found")),
    };

    let tags = db::get_item_tags(conn, &item)?;
    let metas = db::get_item_meta(conn, &item)?;

    let mut html = String::new();
    html.push_str(&format!("<html><head><title>Keep - Item #{}</title>", id));
    html.push_str("<link rel=\"stylesheet\" href=\"/style.css\">");
    html.push_str("</head><body>");
    html.push_str(&format!("<h1>Item #{}</h1>", id));

    // Single table for all details
    html.push_str("<table>");
    html.push_str(&format!(
        "<tr><th>ID</th><td>{}</td></tr>",
        item.id.unwrap_or(0)
    ));
    html.push_str(&format!(
        "<tr><th>Timestamp</th><td>{}</td></tr>",
        item.ts.format("%Y-%m-%d %H:%M:%S")
    ));
    html.push_str(&format!(
        "<tr><th>Size</th><td>{}</td></tr>",
        item.size.unwrap_or(0)
    ));
    html.push_str(&format!(
        "<tr><th>Compression</th><td>{}</td></tr>",
        item.compression
    ));

    // Tags row
    html.push_str("<tr><th>Tags</th><td>");
    if tags.is_empty() {
        html.push_str("No tags");
    } else {
        let tag_links: Vec<String> = tags
            .iter()
            .map(|t| format!("<a href=\"/?tags={}\">{}</a>", t.name, t.name))
            .collect();
        html.push_str(&tag_links.join(", "));
    }
    html.push_str("</td></tr>");

    // Metadata rows
    // NOTE(review): meta.name/meta.value are interpolated without HTML
    // escaping — confirm metadata values are trusted.
    if metas.is_empty() {
        html.push_str("<tr><th>Metadata</th><td>No metadata</td></tr>");
    } else {
        for meta in metas {
            html.push_str(&format!(
                "<tr><th>{}</th><td>{}</td></tr>",
                meta.name, meta.value
            ));
        }
    }
    html.push_str("</table>");

    // Links
    html.push_str("<h2>Actions</h2>");
    html.push_str(&format!(
        "<p><a href=\"/api/item/{}/content\">Download Content</a></p>",
        id
    ));
    html.push_str("<p><a href=\"/\">Back to list</a></p>");

    html.push_str("</body></html>");

    Ok(html)
}
|
||||
@@ -1,77 +0,0 @@
|
||||
use axum::{
|
||||
extract::{ConnectInfo, State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
response::Json,
|
||||
};
|
||||
use clap::Command;
|
||||
use log::warn;
|
||||
use serde_json::json;
|
||||
use std::net::SocketAddr;
|
||||
|
||||
use crate::meta_plugin::MetaPluginType;
|
||||
use crate::modes::status::{StatusInfo, generate_status_info};
|
||||
use super::common::{AppState, ApiResponse, check_auth};
|
||||
|
||||
pub async fn handle_status(
|
||||
State(state): State<AppState>,
|
||||
headers: HeaderMap,
|
||||
ConnectInfo(addr): ConnectInfo<SocketAddr>,
|
||||
) -> Result<Json<ApiResponse<StatusInfo>>, StatusCode> {
|
||||
if !check_auth(&headers, &state.password) {
|
||||
warn!("Unauthorized request from {}", addr);
|
||||
return Err(StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
// Use the actual args that the server was started with
|
||||
let args = &state.args;
|
||||
|
||||
// Determine which meta plugins would be enabled for a save operation
|
||||
let mut meta_plugin_types: Vec<MetaPluginType> = crate::modes::common::cmd_args_meta_plugin_types(&mut Command::new("keep"), args);
|
||||
|
||||
// Add digest type if specified
|
||||
let digest_type = crate::modes::common::cmd_args_digest_type(&mut Command::new("keep"), args);
|
||||
let digest_meta_plugin_type = match digest_type {
|
||||
crate::meta_plugin::MetaPluginType::DigestSha256 => Some(MetaPluginType::DigestSha256),
|
||||
crate::meta_plugin::MetaPluginType::DigestMd5 => Some(MetaPluginType::DigestMd5),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(digest_plugin_type) = digest_meta_plugin_type {
|
||||
if !meta_plugin_types.contains(&digest_plugin_type) {
|
||||
meta_plugin_types.push(digest_plugin_type);
|
||||
}
|
||||
}
|
||||
|
||||
let mut db_path = state.data_dir.clone();
|
||||
db_path.push("keep-1.db");
|
||||
|
||||
let status_info = generate_status_info(state.data_dir.clone(), db_path, &meta_plugin_types);
|
||||
|
||||
let response = ApiResponse {
|
||||
success: true,
|
||||
data: Some(status_info),
|
||||
error: None,
|
||||
};
|
||||
|
||||
Ok(Json(response))
|
||||
}
|
||||
|
||||
pub fn get_status_openapi_spec() -> serde_json::Value {
|
||||
json!({
|
||||
"/status": {
|
||||
"get": {
|
||||
"summary": "Get system status",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "System status",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {"$ref": "#/components/schemas/StatusInfo"}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,294 +1,238 @@
|
||||
use clap::*;
|
||||
use is_terminal::IsTerminal;
|
||||
use log::debug;
|
||||
use std::path::PathBuf;
|
||||
use strum::IntoEnumIterator;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::compression_engine;
|
||||
use crate::compression_engine::COMPRESSION_PROGRAMS;
|
||||
use crate::compression_engine::CompressionType;
|
||||
use crate::compression_engine::program::CompressionEngineProgram;
|
||||
|
||||
use crate::modes::common::{get_format_box_chars_no_border_line_separator, get_output_format, OutputFormat};
|
||||
use prettytable::color;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use crate::common::status::StatusInfo;
|
||||
use crate::config;
|
||||
use crate::modes::common::OutputFormat;
|
||||
use comfy_table::{Attribute, Cell, Table};
|
||||
use serde_json;
|
||||
use serde_yaml;
|
||||
use prettytable::row;
|
||||
use prettytable::{Attr, Cell, Row, Table};
|
||||
use prettytable::format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR;
|
||||
|
||||
use crate::meta_plugin;
|
||||
use crate::common::status::PathInfo;
|
||||
use crate::meta_plugin::MetaPluginType;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct StatusInfo {
|
||||
pub paths: PathInfo,
|
||||
pub compression: Vec<CompressionInfo>,
|
||||
pub meta_plugins: Vec<MetaPluginInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct PathInfo {
|
||||
pub data: String,
|
||||
pub database: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct CompressionInfo {
|
||||
#[serde(rename = "type")]
|
||||
pub compression_type: String,
|
||||
pub found: bool,
|
||||
pub default: bool,
|
||||
pub binary: String,
|
||||
pub compress: String,
|
||||
pub decompress: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct MetaPluginInfo {
|
||||
pub meta_name: String,
|
||||
pub found: bool,
|
||||
pub enabled: bool,
|
||||
pub binary: String,
|
||||
pub args: String,
|
||||
}
|
||||
|
||||
pub fn generate_status_info(
|
||||
data_path: PathBuf,
|
||||
db_path: PathBuf,
|
||||
enabled_meta_plugins: &Vec<MetaPluginType>,
|
||||
) -> StatusInfo {
|
||||
let path_info = PathInfo {
|
||||
data: data_path.into_os_string().into_string().expect("Unable to convert data path to string"),
|
||||
database: db_path.into_os_string().into_string().expect("Unable to convert DB path to string"),
|
||||
};
|
||||
|
||||
let default_type = compression_engine::default_compression_type();
|
||||
let mut compression_info = Vec::new();
|
||||
|
||||
// Sort compression types by their string representation
|
||||
let mut sorted_compression_types: Vec<CompressionType> = CompressionType::iter().collect();
|
||||
sorted_compression_types.sort_by_key(|ct| ct.to_string());
|
||||
|
||||
for compression_type in sorted_compression_types {
|
||||
let compression_program: CompressionEngineProgram =
|
||||
match &COMPRESSION_PROGRAMS[compression_type.clone()] {
|
||||
Some(compression_program) => compression_program.clone(),
|
||||
None => CompressionEngineProgram {
|
||||
program: "".to_string(),
|
||||
compress: Vec::new(),
|
||||
decompress: Vec::new(),
|
||||
supported: true,
|
||||
},
|
||||
};
|
||||
|
||||
let is_default = compression_type == default_type;
|
||||
let binary = if compression_program.program.is_empty() {
|
||||
"<INTERNAL>".to_string()
|
||||
} else {
|
||||
compression_program.program
|
||||
};
|
||||
|
||||
compression_info.push(CompressionInfo {
|
||||
compression_type: compression_type.to_string(),
|
||||
found: compression_program.supported,
|
||||
default: is_default,
|
||||
binary,
|
||||
compress: compression_program.compress.join(" "),
|
||||
decompress: compression_program.decompress.join(" "),
|
||||
});
|
||||
}
|
||||
|
||||
let mut meta_plugin_info = Vec::new();
|
||||
|
||||
// Sort meta plugin types by their meta name
|
||||
let mut sorted_meta_plugins: Vec<MetaPluginType> = MetaPluginType::iter().collect();
|
||||
sorted_meta_plugins.sort_by_key(|meta_plugin_type| {
|
||||
let mut meta_plugin = meta_plugin::get_meta_plugin(meta_plugin_type.clone());
|
||||
meta_plugin.meta_name()
|
||||
});
|
||||
|
||||
for meta_plugin_type in sorted_meta_plugins {
|
||||
let mut meta_plugin = meta_plugin::get_meta_plugin(meta_plugin_type.clone());
|
||||
let is_supported = meta_plugin.is_supported();
|
||||
let is_enabled = enabled_meta_plugins.contains(&meta_plugin_type);
|
||||
|
||||
let (binary_display, args_display) = if !is_supported {
|
||||
("<NOT FOUND>".to_string(), "".to_string())
|
||||
} else {
|
||||
if meta_plugin.is_internal() {
|
||||
("<INTERNAL>".to_string(), "".to_string())
|
||||
} else {
|
||||
if let Some((program, args)) = meta_plugin.program_info() {
|
||||
(program.to_string(), args.join(" "))
|
||||
} else {
|
||||
("<NOT FOUND>".to_string(), "".to_string())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
meta_plugin_info.push(MetaPluginInfo {
|
||||
meta_name: meta_plugin.meta_name(),
|
||||
found: is_supported,
|
||||
enabled: is_enabled,
|
||||
binary: binary_display,
|
||||
args: args_display,
|
||||
});
|
||||
}
|
||||
|
||||
StatusInfo {
|
||||
paths: path_info,
|
||||
compression: compression_info,
|
||||
meta_plugins: meta_plugin_info,
|
||||
}
|
||||
}
|
||||
use crate::meta_plugin::get_meta_plugin;
|
||||
|
||||
fn build_path_table(path_info: &PathInfo) -> Table {
|
||||
let mut path_table = Table::new();
|
||||
let mut path_table = crate::modes::common::create_table(true);
|
||||
|
||||
if std::io::stdout().is_terminal() {
|
||||
path_table.set_format(get_format_box_chars_no_border_line_separator());
|
||||
} else {
|
||||
path_table.set_format(*FORMAT_NO_BORDER_LINE_SEPARATOR);
|
||||
}
|
||||
path_table.set_header(vec![
|
||||
Cell::new("Type").add_attribute(Attribute::Bold),
|
||||
Cell::new("Path").add_attribute(Attribute::Bold),
|
||||
]);
|
||||
|
||||
path_table.set_titles(Row::new(vec![
|
||||
Cell::new("Type").with_style(Attr::Bold),
|
||||
Cell::new("Path").with_style(Attr::Bold),
|
||||
]));
|
||||
|
||||
path_table.add_row(Row::new(vec![
|
||||
Cell::new("Data"),
|
||||
Cell::new(&path_info.data),
|
||||
]));
|
||||
|
||||
path_table.add_row(Row::new(vec![
|
||||
Cell::new("Database"),
|
||||
Cell::new(&path_info.database),
|
||||
]));
|
||||
path_table.add_row(vec!["Data", &path_info.data]);
|
||||
path_table.add_row(vec!["Database", &path_info.database]);
|
||||
|
||||
path_table
|
||||
}
|
||||
|
||||
fn build_compression_table(compression_info: &Vec<CompressionInfo>) -> Table {
|
||||
let mut compression_table = Table::new();
|
||||
if std::io::stdout().is_terminal() {
|
||||
compression_table.set_format(get_format_box_chars_no_border_line_separator());
|
||||
fn build_config_table(settings: &config::Settings) -> Table {
|
||||
let mut config_table = crate::modes::common::create_table(true);
|
||||
|
||||
config_table.set_header(vec![
|
||||
Cell::new("Setting").add_attribute(Attribute::Bold),
|
||||
Cell::new("Value").add_attribute(Attribute::Bold),
|
||||
]);
|
||||
|
||||
// Add relevant configuration settings
|
||||
config_table.add_row(vec!["Directory", &settings.dir.to_string_lossy()]);
|
||||
config_table.add_row(vec!["Human Readable", &settings.human_readable.to_string()]);
|
||||
config_table.add_row(vec!["Quiet", &settings.quiet.to_string()]);
|
||||
|
||||
if let Some(output_format) = &settings.output_format {
|
||||
config_table.add_row(vec!["Output Format", output_format]);
|
||||
}
|
||||
|
||||
if let Some(compression) = settings.compression() {
|
||||
config_table.add_row(vec!["Compression", &compression]);
|
||||
}
|
||||
|
||||
config_table
|
||||
}
|
||||
|
||||
fn build_meta_plugins_configured_table(status_info: &StatusInfo) -> Option<Table> {
|
||||
let meta_plugins = status_info.configured_meta_plugins.as_ref()?;
|
||||
if meta_plugins.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Sort meta plugins by name
|
||||
let mut sorted_meta_plugins = meta_plugins.clone();
|
||||
sorted_meta_plugins.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
|
||||
let mut table = crate::modes::common::create_table(true);
|
||||
|
||||
table.set_header(vec![
|
||||
Cell::new("Plugin Name").add_attribute(Attribute::Bold),
|
||||
Cell::new("Options").add_attribute(Attribute::Bold),
|
||||
Cell::new("Outputs").add_attribute(Attribute::Bold),
|
||||
]);
|
||||
|
||||
for plugin_config in sorted_meta_plugins {
|
||||
// Create the plugin to get its default options
|
||||
let meta_plugin_type = match MetaPluginType::from_str(&plugin_config.name) {
|
||||
Ok(plugin_type) => plugin_type,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
// First, create a default plugin to get its default options
|
||||
let default_plugin = get_meta_plugin(meta_plugin_type.clone(), None, None);
|
||||
|
||||
// Start with the default options
|
||||
let mut effective_options = default_plugin.options().clone();
|
||||
|
||||
// Merge with the configured options
|
||||
for (key, value) in &plugin_config.options {
|
||||
effective_options.insert(key.clone(), value.clone());
|
||||
}
|
||||
|
||||
// Convert outputs from HashMap<String, String> to HashMap<String, serde_yaml::Value>
|
||||
let outputs_converted: std::collections::HashMap<String, serde_yaml::Value> = plugin_config
|
||||
.outputs
|
||||
.iter()
|
||||
.map(|(k, v)| (k.clone(), serde_yaml::Value::String(v.clone())))
|
||||
.collect();
|
||||
|
||||
// Create the actual plugin with merged options - the constructor will handle setting up outputs
|
||||
let actual_plugin = get_meta_plugin(
|
||||
meta_plugin_type.clone(),
|
||||
Some(effective_options.clone()),
|
||||
Some(outputs_converted),
|
||||
);
|
||||
|
||||
// Get the default plugin to see its default options
|
||||
let default_plugin = get_meta_plugin(meta_plugin_type.clone(), None, None);
|
||||
|
||||
// Start with the default options
|
||||
let mut all_options = default_plugin.options().clone();
|
||||
// Merge with the configured options
|
||||
for (key, value) in &effective_options {
|
||||
all_options.insert(key.clone(), value.clone());
|
||||
}
|
||||
|
||||
// Sort options by key and convert to a YAML string
|
||||
let mut sorted_options: Vec<_> = all_options.iter().collect();
|
||||
sorted_options.sort_by(|a, b| a.0.cmp(b.0));
|
||||
let sorted_options_map: std::collections::BTreeMap<_, _> =
|
||||
sorted_options.into_iter().collect();
|
||||
|
||||
let options_str = if sorted_options_map.is_empty() {
|
||||
"{}".to_string()
|
||||
} else {
|
||||
compression_table.set_format(*FORMAT_NO_BORDER_LINE_SEPARATOR);
|
||||
serde_yaml::to_string(&sorted_options_map)
|
||||
.unwrap_or_else(|_| "Unable to serialize options".to_string())
|
||||
.trim()
|
||||
.to_string()
|
||||
};
|
||||
|
||||
// Show only non-null outputs from the plugin
|
||||
// Collect and sort outputs by their string representation
|
||||
let mut enabled_output_pairs = Vec::new();
|
||||
for (key, value) in actual_plugin.outputs() {
|
||||
// Skip null values (disabled outputs)
|
||||
if value.is_null() {
|
||||
continue;
|
||||
}
|
||||
|
||||
compression_table.set_titles(row!(
|
||||
b->"Type",
|
||||
b->"Found",
|
||||
b->"Default",
|
||||
b->"Binary",
|
||||
b->"Compress",
|
||||
b->"Decompress"));
|
||||
|
||||
for info in compression_info {
|
||||
compression_table.add_row(Row::new(vec![
|
||||
Cell::new(&info.compression_type),
|
||||
match info.found {
|
||||
true => Cell::new("Yes").with_style(Attr::ForegroundColor(color::GREEN)),
|
||||
false => Cell::new("No").with_style(Attr::ForegroundColor(color::RED)),
|
||||
},
|
||||
match info.default {
|
||||
true => Cell::new("Yes").with_style(Attr::ForegroundColor(color::GREEN)),
|
||||
false => Cell::new("No"),
|
||||
},
|
||||
match info.binary.as_str() {
|
||||
"<INTERNAL>" => Cell::new(&info.binary).with_style(Attr::ForegroundColor(color::BRIGHT_BLACK)),
|
||||
_ => Cell::new(&info.binary),
|
||||
},
|
||||
Cell::new(&info.compress),
|
||||
Cell::new(&info.decompress),
|
||||
]));
|
||||
// Convert serde_yaml::Value to a string representation
|
||||
let value_str = match value {
|
||||
serde_yaml::Value::String(s) => s.clone(),
|
||||
serde_yaml::Value::Number(n) => n.to_string(),
|
||||
serde_yaml::Value::Bool(b) => b.to_string(),
|
||||
serde_yaml::Value::Null => "null".to_string(),
|
||||
serde_yaml::Value::Sequence(_) => {
|
||||
serde_yaml::to_string(value).unwrap_or_else(|_| "[]".to_string())
|
||||
}
|
||||
|
||||
compression_table
|
||||
serde_yaml::Value::Mapping(_) => {
|
||||
serde_yaml::to_string(value).unwrap_or_else(|_| "{}".to_string())
|
||||
}
|
||||
|
||||
fn build_meta_plugin_table(meta_plugin_info: &Vec<MetaPluginInfo>) -> Table {
|
||||
let mut meta_plugin_table = Table::new();
|
||||
if std::io::stdout().is_terminal() {
|
||||
meta_plugin_table.set_format(get_format_box_chars_no_border_line_separator());
|
||||
serde_yaml::Value::Tagged(_) => {
|
||||
serde_yaml::to_string(value).unwrap_or_else(|_| "tagged".to_string())
|
||||
}
|
||||
};
|
||||
// Trim any extra whitespace from the serialized values
|
||||
let value_str = value_str.trim().to_string();
|
||||
if key == &value_str {
|
||||
enabled_output_pairs.push((key.clone(), key.clone()));
|
||||
} else {
|
||||
meta_plugin_table.set_format(*FORMAT_NO_BORDER_LINE_SEPARATOR);
|
||||
enabled_output_pairs.push((key.clone(), format!("{}->{}", key, value_str)));
|
||||
}
|
||||
}
|
||||
|
||||
meta_plugin_table.set_titles(row!(
|
||||
b->"Meta Name",
|
||||
b->"Found",
|
||||
b->"Enabled",
|
||||
b->"Binary",
|
||||
b->"Args"));
|
||||
// Sort outputs by their display value (second element of the tuple)
|
||||
enabled_output_pairs.sort_by(|a, b| a.1.cmp(&b.1));
|
||||
|
||||
for info in meta_plugin_info {
|
||||
meta_plugin_table.add_row(Row::new(vec![
|
||||
Cell::new(&info.meta_name),
|
||||
match info.found {
|
||||
true => Cell::new("Yes").with_style(Attr::ForegroundColor(color::GREEN)),
|
||||
false => Cell::new("No").with_style(Attr::ForegroundColor(color::RED)),
|
||||
},
|
||||
match info.enabled {
|
||||
true => Cell::new("Yes").with_style(Attr::ForegroundColor(color::GREEN)),
|
||||
false => Cell::new("No"),
|
||||
},
|
||||
match info.binary.as_str() {
|
||||
"<INTERNAL>" => Cell::new(&info.binary).with_style(Attr::ForegroundColor(color::BRIGHT_BLACK)),
|
||||
"<NOT FOUND>" => Cell::new(&info.binary).with_style(Attr::ForegroundColor(color::RED)),
|
||||
_ => Cell::new(&info.binary),
|
||||
},
|
||||
Cell::new(&info.args),
|
||||
]));
|
||||
// Join each output on a new line
|
||||
let outputs_str = if enabled_output_pairs.is_empty() {
|
||||
"{}".to_string()
|
||||
} else {
|
||||
enabled_output_pairs
|
||||
.into_iter()
|
||||
.map(|(_, display)| display)
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
};
|
||||
|
||||
table.add_row(vec![plugin_config.name.clone(), options_str, outputs_str]);
|
||||
}
|
||||
|
||||
meta_plugin_table
|
||||
Some(table)
|
||||
}
|
||||
|
||||
pub fn mode_status(
|
||||
_cmd: &mut Command,
|
||||
args: &crate::Args,
|
||||
cmd: &mut Command,
|
||||
settings: &config::Settings,
|
||||
data_path: PathBuf,
|
||||
db_path: PathBuf,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
// Determine which meta plugins would be enabled for a save operation
|
||||
let mut meta_plugin_types: Vec<MetaPluginType> = crate::modes::common::cmd_args_meta_plugin_types(_cmd, &args);
|
||||
debug!("STATUS: Starting mode_status function");
|
||||
|
||||
// Add digest type if specified
|
||||
let digest_type = crate::modes::common::cmd_args_digest_type(_cmd, &args);
|
||||
let digest_meta_plugin_type = match digest_type {
|
||||
crate::meta_plugin::MetaPluginType::DigestSha256 => Some(MetaPluginType::DigestSha256),
|
||||
crate::meta_plugin::MetaPluginType::DigestMd5 => Some(MetaPluginType::DigestMd5),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(digest_plugin_type) = digest_meta_plugin_type {
|
||||
if !meta_plugin_types.contains(&digest_plugin_type) {
|
||||
meta_plugin_types.push(digest_plugin_type);
|
||||
}
|
||||
}
|
||||
|
||||
let output_format = get_output_format(args);
|
||||
let status_info = generate_status_info(data_path, db_path, &meta_plugin_types);
|
||||
let status_service = crate::services::status_service::StatusService::new();
|
||||
let output_format = crate::modes::common::settings_output_format(settings);
|
||||
debug!("STATUS: About to generate status info");
|
||||
let status_info = status_service.generate_status(cmd, settings, data_path, db_path);
|
||||
debug!("STATUS: Status info generated successfully");
|
||||
|
||||
match output_format {
|
||||
OutputFormat::Table => {
|
||||
println!("CONFIG:");
|
||||
let config_table = build_config_table(settings);
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&config_table.trim_fmt())
|
||||
);
|
||||
println!();
|
||||
|
||||
println!("PATHS:");
|
||||
build_path_table(&status_info.paths).printstd();
|
||||
let path_table = build_path_table(&status_info.paths);
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&path_table.trim_fmt())
|
||||
);
|
||||
println!();
|
||||
println!("COMPRESSION:");
|
||||
build_compression_table(&status_info.compression).printstd();
|
||||
|
||||
// Always try to print META PLUGINS CONFIGURED section using status_info
|
||||
if let Some(meta_plugins_table) = build_meta_plugins_configured_table(&status_info) {
|
||||
println!("META PLUGINS CONFIGURED:");
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&meta_plugins_table.trim_fmt())
|
||||
);
|
||||
println!();
|
||||
println!("META PLUGINS:");
|
||||
build_meta_plugin_table(&status_info.meta_plugins).printstd();
|
||||
} else {
|
||||
println!("META PLUGINS CONFIGURED:");
|
||||
println!("No plugins configured");
|
||||
println!();
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
}
|
||||
OutputFormat::Json => {
|
||||
// Create a subset for status info that includes everything
|
||||
println!("{}", serde_json::to_string_pretty(&status_info)?);
|
||||
Ok(())
|
||||
},
|
||||
}
|
||||
OutputFormat::Yaml => {
|
||||
println!("{}", serde_yaml::to_string(&status_info)?);
|
||||
Ok(())
|
||||
|
||||
368
src/modes/status_plugins.rs
Normal file
368
src/modes/status_plugins.rs
Normal file
@@ -0,0 +1,368 @@
|
||||
use clap::*;
|
||||
use log::debug;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Helper function to convert serde_json::Value to serde_yaml::Value.
|
||||
///
|
||||
/// Recursively converts JSON values to equivalent YAML values, handling null, bool, number,
|
||||
/// string, array, and object types.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `value` - Reference to the JSON value to convert.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// The equivalent YAML value.
|
||||
fn convert_json_to_yaml_value(value: &serde_json::Value) -> serde_yaml::Value {
|
||||
match value {
|
||||
serde_json::Value::Null => serde_yaml::Value::Null,
|
||||
serde_json::Value::Bool(b) => serde_yaml::Value::Bool(*b),
|
||||
serde_json::Value::Number(n) => {
|
||||
if let Some(i) = n.as_i64() {
|
||||
serde_yaml::Value::Number(serde_yaml::Number::from(i))
|
||||
} else if let Some(f) = n.as_f64() {
|
||||
serde_yaml::Value::Number(serde_yaml::Number::from(f))
|
||||
} else {
|
||||
serde_yaml::Value::String(n.to_string())
|
||||
}
|
||||
}
|
||||
serde_json::Value::String(s) => serde_yaml::Value::String(s.clone()),
|
||||
serde_json::Value::Array(arr) => {
|
||||
let mut yaml_array = Vec::new();
|
||||
for item in arr {
|
||||
yaml_array.push(convert_json_to_yaml_value(item));
|
||||
}
|
||||
serde_yaml::Value::Sequence(yaml_array)
|
||||
}
|
||||
serde_json::Value::Object(obj) => {
|
||||
let mut yaml_mapping = serde_yaml::Mapping::new();
|
||||
for (k, v) in obj {
|
||||
yaml_mapping.insert(
|
||||
serde_yaml::Value::String(k.clone()),
|
||||
convert_json_to_yaml_value(v),
|
||||
);
|
||||
}
|
||||
serde_yaml::Value::Mapping(yaml_mapping)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
use crate::config;
|
||||
use crate::modes::common::OutputFormat;
|
||||
use comfy_table::{Attribute, Cell, Table};
|
||||
use serde_json;
|
||||
use serde_yaml;
|
||||
|
||||
use crate::common::status::{CompressionInfo, MetaPluginInfo};
|
||||
use crate::meta_plugin::{MetaPluginType, get_meta_plugin};
|
||||
|
||||
fn build_meta_plugin_table(
|
||||
meta_plugin_info: &std::collections::HashMap<String, MetaPluginInfo>,
|
||||
) -> Table {
|
||||
// Builds a formatted table displaying meta plugin information.
|
||||
//
|
||||
// Sorts plugins by name and displays options as YAML and outputs as a list.
|
||||
//
|
||||
// # Arguments
|
||||
//
|
||||
// * `meta_plugin_info` - HashMap of meta plugin information.
|
||||
//
|
||||
// # Returns
|
||||
//
|
||||
// A formatted `comfy_table::Table`.
|
||||
let mut meta_plugin_table = crate::modes::common::create_table(true);
|
||||
|
||||
meta_plugin_table.set_header(vec![
|
||||
Cell::new("Plugin Name").add_attribute(Attribute::Bold),
|
||||
Cell::new("Options").add_attribute(Attribute::Bold),
|
||||
Cell::new("Outputs").add_attribute(Attribute::Bold),
|
||||
]);
|
||||
|
||||
// Sort meta plugin info by plugin name
|
||||
let mut sorted_meta_plugin_info: Vec<&MetaPluginInfo> = meta_plugin_info.values().collect();
|
||||
sorted_meta_plugin_info.sort_by(|a, b| a.meta_name.cmp(&b.meta_name));
|
||||
|
||||
for info in sorted_meta_plugin_info {
|
||||
// Get default options for the meta plugin
|
||||
let meta_plugin_type = match MetaPluginType::from_str(&info.meta_name) {
|
||||
Ok(plugin_type) => plugin_type,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
// Create a default plugin to get its default options
|
||||
let default_plugin = get_meta_plugin(meta_plugin_type.clone(), None, None);
|
||||
|
||||
// Get and sort options
|
||||
let mut options: Vec<_> = default_plugin.options().iter().collect();
|
||||
options.sort_by(|a, b| a.0.cmp(b.0));
|
||||
|
||||
// Format options as YAML string, each on a new line
|
||||
let options_str = if options.is_empty() {
|
||||
"{}".to_string()
|
||||
} else {
|
||||
let options_map: std::collections::BTreeMap<_, _> = options.into_iter().collect();
|
||||
serde_yaml::to_string(&options_map)
|
||||
.unwrap_or_else(|_| "Unable to serialize options".to_string())
|
||||
.trim()
|
||||
.to_string()
|
||||
};
|
||||
|
||||
// Get and sort output keys
|
||||
let mut output_keys: Vec<String> = info.outputs.keys().map(|k| k.to_string()).collect();
|
||||
output_keys.sort();
|
||||
let outputs_display = if output_keys.is_empty() {
|
||||
"{}".to_string()
|
||||
} else {
|
||||
output_keys.join("\n")
|
||||
};
|
||||
|
||||
meta_plugin_table.add_row(vec![info.meta_name.clone(), options_str, outputs_display]);
|
||||
}
|
||||
|
||||
meta_plugin_table
|
||||
}
|
||||
|
||||
fn build_compression_table(compression_info: &Vec<CompressionInfo>) -> Table {
|
||||
// Builds a formatted table displaying compression plugin information.
|
||||
//
|
||||
// # Arguments
|
||||
//
|
||||
// * `compression_info` - Vector of compression info.
|
||||
//
|
||||
// # Returns
|
||||
//
|
||||
// A formatted `comfy_table::Table`.
|
||||
let mut compression_table = crate::modes::common::create_table(true);
|
||||
|
||||
compression_table.set_header(vec![
|
||||
Cell::new("Type").add_attribute(Attribute::Bold),
|
||||
Cell::new("Found").add_attribute(Attribute::Bold),
|
||||
Cell::new("Enabled").add_attribute(Attribute::Bold),
|
||||
Cell::new("Binary").add_attribute(Attribute::Bold),
|
||||
Cell::new("Compress").add_attribute(Attribute::Bold),
|
||||
Cell::new("Decompress").add_attribute(Attribute::Bold),
|
||||
]);
|
||||
|
||||
for info in compression_info {
|
||||
compression_table.add_row(vec![
|
||||
info.compression_type.clone(),
|
||||
match info.found {
|
||||
true => "Yes".to_string(),
|
||||
false => "No".to_string(),
|
||||
},
|
||||
match info.default {
|
||||
true => "Yes".to_string(),
|
||||
false => "No".to_string(),
|
||||
},
|
||||
info.binary.clone(),
|
||||
info.compress.clone(),
|
||||
info.decompress.clone(),
|
||||
]);
|
||||
}
|
||||
|
||||
compression_table
|
||||
}
|
||||
|
||||
fn build_filter_plugin_table(filter_plugins: &[crate::common::status::FilterPluginInfo]) -> Table {
|
||||
// Builds a formatted table displaying filter plugin information.
|
||||
//
|
||||
// Sorts plugins by name and formats options as YAML sequence.
|
||||
//
|
||||
// # Arguments
|
||||
//
|
||||
// * `filter_plugins` - Vector of filter plugin info.
|
||||
//
|
||||
// # Returns
|
||||
//
|
||||
// A formatted `comfy_table::Table`.
|
||||
let mut filter_plugin_table = crate::modes::common::create_table(true);
|
||||
|
||||
filter_plugin_table.set_header(vec![
|
||||
Cell::new("Plugin Name").add_attribute(Attribute::Bold),
|
||||
Cell::new("Options").add_attribute(Attribute::Bold),
|
||||
Cell::new("Description").add_attribute(Attribute::Bold),
|
||||
]);
|
||||
|
||||
// Sort plugins by name
|
||||
let mut sorted_plugins: Vec<_> = filter_plugins.iter().collect();
|
||||
sorted_plugins.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
|
||||
for plugin_info in sorted_plugins {
|
||||
// Format options as YAML string
|
||||
let options_str = if plugin_info.options.is_empty() {
|
||||
"{}".to_string()
|
||||
} else {
|
||||
// Convert options to a proper structure for display
|
||||
let mut options_list = Vec::new();
|
||||
for opt in &plugin_info.options {
|
||||
let mut opt_map = serde_yaml::Mapping::new();
|
||||
opt_map.insert(
|
||||
serde_yaml::Value::String("name".to_string()),
|
||||
serde_yaml::Value::String(opt.name.clone()),
|
||||
);
|
||||
if let Some(default) = &opt.default {
|
||||
// Convert serde_json::Value to serde_yaml::Value
|
||||
let yaml_value = match default {
|
||||
serde_json::Value::Null => serde_yaml::Value::Null,
|
||||
serde_json::Value::Bool(b) => serde_yaml::Value::Bool(*b),
|
||||
serde_json::Value::Number(n) => {
|
||||
if let Some(i) = n.as_i64() {
|
||||
serde_yaml::Value::Number(serde_yaml::Number::from(i))
|
||||
} else if let Some(f) = n.as_f64() {
|
||||
serde_yaml::Value::Number(serde_yaml::Number::from(f))
|
||||
} else {
|
||||
serde_yaml::Value::String(default.to_string())
|
||||
}
|
||||
}
|
||||
serde_json::Value::String(s) => serde_yaml::Value::String(s.clone()),
|
||||
serde_json::Value::Array(arr) => {
|
||||
// Convert each element in the array
|
||||
let mut yaml_array = Vec::new();
|
||||
for item in arr {
|
||||
yaml_array.push(convert_json_to_yaml_value(item));
|
||||
}
|
||||
serde_yaml::Value::Sequence(yaml_array)
|
||||
}
|
||||
serde_json::Value::Object(obj) => {
|
||||
// Convert each key-value pair in the object
|
||||
let mut yaml_mapping = serde_yaml::Mapping::new();
|
||||
for (k, v) in obj {
|
||||
yaml_mapping.insert(
|
||||
serde_yaml::Value::String(k.clone()),
|
||||
convert_json_to_yaml_value(v),
|
||||
);
|
||||
}
|
||||
serde_yaml::Value::Mapping(yaml_mapping)
|
||||
}
|
||||
};
|
||||
opt_map.insert(serde_yaml::Value::String("default".to_string()), yaml_value);
|
||||
} else {
|
||||
opt_map.insert(
|
||||
serde_yaml::Value::String("default".to_string()),
|
||||
serde_yaml::Value::Null,
|
||||
);
|
||||
}
|
||||
opt_map.insert(
|
||||
serde_yaml::Value::String("required".to_string()),
|
||||
serde_yaml::Value::Bool(opt.required),
|
||||
);
|
||||
options_list.push(serde_yaml::Value::Mapping(opt_map));
|
||||
}
|
||||
serde_yaml::to_string(&serde_yaml::Value::Sequence(options_list))
|
||||
.unwrap_or_else(|_| "Unable to serialize options".to_string())
|
||||
.trim()
|
||||
.to_string()
|
||||
};
|
||||
|
||||
filter_plugin_table.add_row(vec![
|
||||
plugin_info.name.clone(),
|
||||
options_str,
|
||||
plugin_info.description.clone(),
|
||||
]);
|
||||
}
|
||||
|
||||
// If no filter plugins are available, add a row indicating that
|
||||
if filter_plugins.is_empty() {
|
||||
filter_plugin_table.add_row(vec!["No filter plugins available", "{}", ""]);
|
||||
}
|
||||
|
||||
filter_plugin_table
|
||||
}
|
||||
|
||||
pub fn mode_status_plugins(
|
||||
cmd: &mut Command,
|
||||
settings: &config::Settings,
|
||||
data_path: PathBuf,
|
||||
db_path: PathBuf,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
// Displays status information for available plugins in the specified output format.
|
||||
//
|
||||
// Generates status using StatusService and renders as table, JSON, or YAML.
|
||||
//
|
||||
// # Arguments
|
||||
//
|
||||
// * `cmd` - Mutable Clap command.
|
||||
// * `settings` - Application settings.
|
||||
// * `data_path` - Data directory path.
|
||||
// * `db_path` - Database path.
|
||||
//
|
||||
// # Returns
|
||||
//
|
||||
// `Ok(())` on success, or anyhow::Error.
|
||||
debug!("STATUS_PLUGINS: Starting mode_status_plugins function");
|
||||
|
||||
let status_service = crate::services::status_service::StatusService::new();
|
||||
let output_format = crate::modes::common::settings_output_format(settings);
|
||||
debug!("STATUS_PLUGINS: About to generate status info");
|
||||
let status_info = status_service.generate_status(cmd, settings, data_path, db_path);
|
||||
debug!("STATUS_PLUGINS: Status info generated successfully");
|
||||
|
||||
match output_format {
|
||||
OutputFormat::Table => {
|
||||
println!("META PLUGINS:");
|
||||
let meta_table = build_meta_plugin_table(&status_info.meta_plugins);
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&meta_table.trim_fmt())
|
||||
);
|
||||
println!();
|
||||
|
||||
println!("COMPRESSION PLUGINS:");
|
||||
let compression_table = build_compression_table(&status_info.compression);
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&compression_table.trim_fmt())
|
||||
);
|
||||
println!();
|
||||
|
||||
println!("FILTER PLUGINS:");
|
||||
let filter_table = build_filter_plugin_table(&status_info.filter_plugins);
|
||||
println!(
|
||||
"{}",
|
||||
crate::modes::common::trim_lines_end(&filter_table.trim_fmt())
|
||||
);
|
||||
println!();
|
||||
Ok(())
|
||||
}
|
||||
OutputFormat::Json => {
|
||||
// Create a subset for plugins only using status_info
|
||||
let plugins_info = serde_json::json!({
|
||||
"meta_plugins_available": status_info.meta_plugins,
|
||||
"meta_plugins_configured": status_info.configured_meta_plugins,
|
||||
"filter_plugins": status_info.filter_plugins
|
||||
});
|
||||
println!("{}", serde_json::to_string_pretty(&plugins_info)?);
|
||||
Ok(())
|
||||
}
|
||||
OutputFormat::Yaml => {
|
||||
// Create a proper structure for plugins info using status_info
|
||||
use serde_yaml::Mapping;
|
||||
let mut plugins_mapping = Mapping::new();
|
||||
|
||||
// Add available plugins
|
||||
plugins_mapping.insert(
|
||||
serde_yaml::Value::String("meta_plugins_available".to_string()),
|
||||
serde_yaml::to_value(&status_info.meta_plugins)?,
|
||||
);
|
||||
|
||||
// Add configured plugins if they exist
|
||||
if let Some(configured_plugins) = &status_info.configured_meta_plugins {
|
||||
plugins_mapping.insert(
|
||||
serde_yaml::Value::String("meta_plugins_configured".to_string()),
|
||||
serde_yaml::to_value(configured_plugins)?,
|
||||
);
|
||||
}
|
||||
|
||||
// Add filter plugins
|
||||
plugins_mapping.insert(
|
||||
serde_yaml::Value::String("filter_plugins".to_string()),
|
||||
serde_yaml::to_value(&status_info.filter_plugins)?,
|
||||
);
|
||||
|
||||
println!("{}", serde_yaml::to_string(&plugins_mapping)?);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,134 +0,0 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::compression_engine::{CompressionType, get_compression_engine};
|
||||
use crate::db;
|
||||
use crate::meta_plugin;
|
||||
use crate::modes::common::{cmd_args_digest_type, get_digest_type_meta};
|
||||
use clap::Command;
|
||||
use clap::error::ErrorKind;
|
||||
use log::{debug, info};
|
||||
use rusqlite::Connection;
|
||||
|
||||
pub fn mode_update(
|
||||
cmd: &mut Command,
|
||||
args: &crate::Args,
|
||||
ids: &mut Vec<i64>,
|
||||
tags: &mut Vec<String>,
|
||||
conn: &mut Connection,
|
||||
data_path: PathBuf,
|
||||
) -> Result<()> {
|
||||
if ids.is_empty() {
|
||||
cmd.error(
|
||||
ErrorKind::InvalidValue,
|
||||
"No ID given, you must supply exactly one ID when using --update",
|
||||
)
|
||||
.exit();
|
||||
} else if ids.len() > 1 {
|
||||
cmd.error(ErrorKind::InvalidValue, "More than one ID given, you must supply exactly one ID or atleast one tag when using --update").exit();
|
||||
}
|
||||
|
||||
let item_id = ids.iter().next().expect("Unable to determine item id");
|
||||
let item_maybe = db::get_item(conn, *item_id)?;
|
||||
|
||||
let mut item = item_maybe.expect("Unable to find item in database");
|
||||
debug!("MAIN: Found item {:?}", item);
|
||||
|
||||
// Use a transaction for database operations to ensure atomicity
|
||||
let tx = conn.transaction()?;
|
||||
|
||||
if !tags.is_empty() {
|
||||
debug!("MAIN: Updating item tags");
|
||||
db::set_item_tags(&tx, item.clone(), tags)?;
|
||||
}
|
||||
|
||||
let item_id = item.id.ok_or_else(|| anyhow!("Item missing ID"))?;
|
||||
let item_path = {
|
||||
let mut path = data_path.clone();
|
||||
path.push(item_id.to_string());
|
||||
path
|
||||
};
|
||||
|
||||
let compression_type = CompressionType::from_str(&item.compression)?;
|
||||
let compression_engine =
|
||||
get_compression_engine(compression_type).expect("Unable to get compression engine");
|
||||
|
||||
if item.size.is_none() {
|
||||
info!("Updating unknown stream size");
|
||||
let item_file_metadata = item_path.metadata();
|
||||
|
||||
if item_file_metadata.is_ok() {
|
||||
debug!("MAIN: Updating stream size of {:?}", item_path);
|
||||
let size = compression_engine.size(item_path.clone())?;
|
||||
item.size = Some(size as i64);
|
||||
db::update_item(&tx, item.clone())?;
|
||||
} else {
|
||||
debug!(
|
||||
"MAIN: Unable to update size of item due to missing file {:?}",
|
||||
item_path
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let digest_type = cmd_args_digest_type(cmd, args);
|
||||
let digest_meta = get_digest_type_meta(digest_type.clone());
|
||||
let digest_value = db::get_item_meta_value(&tx, &item, digest_meta)?;
|
||||
|
||||
if digest_value.is_none() || digest_value.unwrap().is_empty() {
|
||||
let item_file_metadata = item_path.metadata();
|
||||
|
||||
if item_file_metadata.is_ok() {
|
||||
debug!("MAIN: Updating stream size of {:?}", item_path);
|
||||
|
||||
// Create and initialize digest engine
|
||||
let mut digest_engine = meta_plugin::get_meta_plugin(digest_type.clone());
|
||||
|
||||
// Read file content and update digest
|
||||
let mut reader = compression_engine.open(item_path)?;
|
||||
let mut buffer = [0; 4096];
|
||||
loop {
|
||||
let bytes_read = reader.read(&mut buffer)?;
|
||||
if bytes_read == 0 {
|
||||
break;
|
||||
}
|
||||
digest_engine.update(&buffer[..bytes_read]);
|
||||
}
|
||||
|
||||
// Get final digest value
|
||||
let digest_value = digest_engine.finalize()?;
|
||||
debug!("DIGEST: {}", digest_value);
|
||||
|
||||
// Save digest to meta
|
||||
let digest_meta_name = get_digest_type_meta(digest_type);
|
||||
let digest_meta = db::Meta {
|
||||
id: item_id,
|
||||
name: digest_meta_name,
|
||||
value: digest_value,
|
||||
};
|
||||
db::store_meta(&tx, digest_meta)?;
|
||||
} else {
|
||||
debug!(
|
||||
"MAIN: Unable to update digest of item due to missing file {:?}",
|
||||
item_path
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if !args.item.meta.is_empty() {
|
||||
debug!("MAIN: Updating item meta");
|
||||
for kv in args.item.meta.iter() {
|
||||
let meta = db::Meta {
|
||||
id: item_id,
|
||||
name: kv.key.to_string(),
|
||||
value: kv.value.to_string(),
|
||||
};
|
||||
db::store_meta(&tx, meta)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Commit the transaction
|
||||
tx.commit()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
30
src/parser/filter.pest
Normal file
30
src/parser/filter.pest
Normal file
@@ -0,0 +1,30 @@
|
||||
WHITESPACE = _{ " " | "\t" | "\n" | "\r" }
|
||||
|
||||
filters = { filter ~ ("," ~ filters)? }
|
||||
filter = { filter_name ~ ("(" ~ options ~ ")")? }
|
||||
filter_name = @{ ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "_")* }
|
||||
|
||||
options = { option ~ ("," ~ options)? }
|
||||
option = { (option_name ~ "=")? ~ option_value }
|
||||
option_name = @{ ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "_")* }
|
||||
|
||||
option_value = {
|
||||
JSON_NUMBER |
|
||||
JSON_STRING |
|
||||
JSON_BOOLEAN
|
||||
}
|
||||
|
||||
JSON_NUMBER = @{
|
||||
("-")? ~
|
||||
("0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT*) ~
|
||||
("." ~ ASCII_DIGIT*)? ~
|
||||
(("e" | "E") ~ ("+" | "-")? ~ ASCII_DIGIT+)?
|
||||
}
|
||||
|
||||
JSON_STRING = ${
|
||||
"\"" ~
|
||||
(("\\" ~ ANY) | (!("\"" | "\\") ~ ANY))* ~
|
||||
"\""
|
||||
}
|
||||
|
||||
JSON_BOOLEAN = ${ "true" | "false" }
|
||||
119
src/parser/filter_parser.rs
Normal file
119
src/parser/filter_parser.rs
Normal file
@@ -0,0 +1,119 @@
|
||||
use pest::Parser;
|
||||
use pest_derive::Parser;
|
||||
use std::collections::HashMap;
|
||||
use serde_json;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[grammar = "filter.pest"]
|
||||
pub struct FilterParser;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Filter {
|
||||
pub name: String,
|
||||
pub options: HashMap<String, serde_json::Value>,
|
||||
}
|
||||
|
||||
pub fn parse_filter_string(input: &str) -> Result<Vec<Filter>, Box<dyn std::error::Error>> {
|
||||
let mut filters = Vec::new();
|
||||
let pairs = FilterParser::parse(<FilterParser as pest::Parser>::Rule::filters, input)?;
|
||||
|
||||
for pair in pairs {
|
||||
if pair.as_rule() == <FilterParser as pest::Parser>::Rule::filter {
|
||||
let mut name = String::new();
|
||||
let mut options = HashMap::new();
|
||||
|
||||
for inner_pair in pair.into_inner() {
|
||||
match inner_pair.as_rule() {
|
||||
<FilterParser as pest::Parser>::Rule::filter_name => {
|
||||
name = inner_pair.as_str().to_string();
|
||||
}
|
||||
<FilterParser as pest::Parser>::Rule::options => {
|
||||
for option_pair in inner_pair.into_inner() {
|
||||
if option_pair.as_rule() == <FilterParser as pest::Parser>::Rule::option {
|
||||
let mut option_name = None;
|
||||
let mut option_value = None;
|
||||
|
||||
for option_inner in option_pair.into_inner() {
|
||||
match option_inner.as_rule() {
|
||||
<FilterParser as pest::Parser>::Rule::option_name => {
|
||||
option_name = Some(option_inner.as_str().to_string());
|
||||
}
|
||||
<FilterParser as pest::Parser>::Rule::option_value => {
|
||||
option_value = Some(parse_option_value(option_inner.as_str())?);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(value) = option_value {
|
||||
// If no name is provided, use the filter name as the key
|
||||
let key = option_name.unwrap_or_else(|| name.clone());
|
||||
options.insert(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
filters.push(Filter { name, options });
|
||||
}
|
||||
}
|
||||
|
||||
Ok(filters)
|
||||
}
|
||||
|
||||
fn parse_option_value(input: &str) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
|
||||
serde_json::from_str(input).map_err(|e| Box::new(e) as Box<dyn std::error::Error>)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_simple_filter() {
|
||||
let result = parse_filter_string("grep").unwrap();
|
||||
assert_eq!(result.len(), 1);
|
||||
assert_eq!(result[0].name, "grep");
|
||||
assert!(result[0].options.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_filter_with_options() {
|
||||
let result = parse_filter_string("head_lines(10)").unwrap();
|
||||
assert_eq!(result.len(), 1);
|
||||
assert_eq!(result[0].name, "head_lines");
|
||||
assert_eq!(result[0].options.len(), 1);
|
||||
if let serde_json::Value::Number(n) = result[0].options.get("head_lines").unwrap() {
|
||||
assert_eq!(n.as_i64(), Some(10));
|
||||
} else {
|
||||
panic!("Expected number");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_filter_with_named_options() {
|
||||
let result = parse_filter_string(r#"grep(pattern="error")"#).unwrap();
|
||||
assert_eq!(result.len(), 1);
|
||||
assert_eq!(result[0].name, "grep");
|
||||
assert_eq!(result[0].options.get("pattern").unwrap().as_str(), Some("error"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_multiple_filters() {
|
||||
let result = parse_filter_string(r#"head_lines(10),grep(pattern="error")"#).unwrap();
|
||||
assert_eq!(result.len(), 2);
|
||||
assert_eq!(result[0].name, "head_lines");
|
||||
assert_eq!(result[0].options.len(), 1);
|
||||
if let serde_json::Value::Number(n) = result[0].options.get("head_lines").unwrap() {
|
||||
assert_eq!(n.as_i64(), Some(10));
|
||||
} else {
|
||||
panic!("Expected number");
|
||||
}
|
||||
assert_eq!(result[1].name, "grep");
|
||||
assert_eq!(result[1].options.len(), 1);
|
||||
assert_eq!(result[1].options.get("pattern").unwrap().as_str(), Some("error"));
|
||||
}
|
||||
}
|
||||
15
src/parser/mod.rs
Normal file
15
src/parser/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
/// Parsing utilities for filters and other inputs.
|
||||
///
|
||||
/// This module provides tools for parsing filter strings and other structured
|
||||
/// inputs used throughout the application. Currently, it includes a pest-based
|
||||
/// parser for filter expressions.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use keep::parser::parse_filter_string;
|
||||
/// let filters = parse_filter_string("head:5|grep:hello").unwrap();
|
||||
/// ```
|
||||
pub mod filter_parser;
|
||||
|
||||
pub use filter_parser::{FilterParser, parse_filter_string};
|
||||
25
src/plugin.rs
Normal file
25
src/plugin.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
use std::io::Write;
|
||||
|
||||
use derive_more::{Deref, DerefMut};
|
||||
|
||||
/// A wrapper around a child process's stdin that implements the Write trait.
|
||||
///
|
||||
/// This struct allows writing data to an external process's standard input
|
||||
/// in a way that's compatible with Rust's I/O traits.
|
||||
#[derive(Deref, DerefMut)]
|
||||
pub struct ProgramWriter {
|
||||
/// The stdin handle of a spawned child process
|
||||
#[deref]
|
||||
#[deref_mut]
|
||||
pub stdin: std::process::ChildStdin,
|
||||
}
|
||||
|
||||
impl Write for ProgramWriter {
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
self.stdin.write(buf)
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
self.stdin.flush()
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,21 @@
|
||||
pub(crate) use std::io::Write;
|
||||
//! Shared plugin utilities for the keep application.
|
||||
//!
|
||||
//! This module provides common functionality that can be used by different
|
||||
//! plugin implementations throughout the application.
|
||||
|
||||
use std::io::Write;
|
||||
|
||||
use derive_more::{Deref, DerefMut};
|
||||
|
||||
/// A wrapper around a child process's stdin that implements the Write trait.
|
||||
///
|
||||
/// This struct allows writing data to an external process's standard input
|
||||
/// in a way that's compatible with Rust's I/O traits.
|
||||
#[derive(Deref, DerefMut)]
|
||||
pub struct ProgramWriter {
|
||||
/// The stdin handle of a spawned child process
|
||||
#[deref]
|
||||
#[deref_mut]
|
||||
pub stdin: std::process::ChildStdin,
|
||||
}
|
||||
|
||||
|
||||
403
src/services/async_item_service.rs
Normal file
403
src/services/async_item_service.rs
Normal file
@@ -0,0 +1,403 @@
|
||||
/// Asynchronous service wrapper for `ItemService`.
|
||||
///
|
||||
/// Uses `tokio::task::spawn_blocking` to offload synchronous operations (DB/FS)
|
||||
/// to a blocking thread pool, allowing non-blocking async usage in servers.
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::config::Settings;
|
||||
use crate::services::error::CoreError;
|
||||
use crate::services::item_service::ItemService;
|
||||
use crate::services::types::{ItemWithContent, ItemWithMeta};
|
||||
use clap::Command;
|
||||
use rusqlite::Connection;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
/// An asynchronous wrapper around the `ItemService` for use in async contexts like the web server.
|
||||
/// It uses `tokio::task::spawn_blocking` to run synchronous database and filesystem operations
|
||||
/// on a dedicated thread pool, preventing them from blocking the async runtime.
|
||||
#[allow(dead_code)]
|
||||
/// Async wrapper for ItemService operations.
|
||||
pub struct AsyncItemService {
|
||||
pub data_dir: PathBuf,
|
||||
db: Arc<Mutex<Connection>>,
|
||||
item_service: Arc<ItemService>,
|
||||
cmd: Arc<Mutex<Command>>,
|
||||
settings: Arc<Settings>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl AsyncItemService {
|
||||
/// Creates a new `AsyncItemService`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `data_dir` - Path to data directory.
|
||||
/// * `db` - Arc-wrapped mutex for DB connection.
|
||||
/// * `item_service` - Arc-wrapped ItemService.
|
||||
/// * `cmd` - Arc-wrapped mutex for Clap command.
|
||||
/// * `settings` - Arc-wrapped settings.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `AsyncItemService`.
|
||||
pub fn new(
|
||||
data_dir: PathBuf,
|
||||
db: Arc<Mutex<Connection>>,
|
||||
item_service: Arc<ItemService>,
|
||||
cmd: Arc<Mutex<Command>>,
|
||||
settings: Arc<Settings>,
|
||||
) -> Self {
|
||||
Self {
|
||||
data_dir,
|
||||
db,
|
||||
item_service,
|
||||
cmd,
|
||||
settings,
|
||||
}
|
||||
}
|
||||
|
||||
/// Internal helper to execute synchronous operations in a blocking task.
|
||||
///
|
||||
/// Spawns a blocking task with the DB connection and ItemService.
|
||||
///
|
||||
/// # Type Parameters
|
||||
///
|
||||
/// * `F` - Closure type.
|
||||
/// * `T` - Return type.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `f` - The synchronous closure to execute.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Result of the closure, or CoreError on task failure.
|
||||
async fn execute_blocking<F, T>(&self, f: F) -> Result<T, CoreError>
|
||||
where
|
||||
F: FnOnce(&Connection, &ItemService) -> Result<T, CoreError> + Send + 'static,
|
||||
T: Send + 'static,
|
||||
{
|
||||
let db = self.db.clone();
|
||||
let item_service = self.item_service.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let conn = db.blocking_lock();
|
||||
f(&conn, &item_service)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| CoreError::Other(anyhow::anyhow!("Blocking task failed: {}", e)))?
|
||||
}
|
||||
|
||||
pub async fn get_item(&self, id: i64) -> Result<ItemWithMeta, CoreError> {
|
||||
self.execute_blocking(move |conn, item_service| item_service.get_item(conn, id))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_item_content(&self, id: i64) -> Result<ItemWithContent, CoreError> {
|
||||
self.execute_blocking(move |conn, item_service| item_service.get_item_content(conn, id))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_item_content_info(
|
||||
&self,
|
||||
id: i64,
|
||||
filter: Option<String>,
|
||||
) -> Result<(Vec<u8>, String, bool), CoreError> {
|
||||
self.execute_blocking(move |conn, item_service| {
|
||||
item_service.get_item_content_info(conn, id, filter)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn stream_item_content_by_id(
|
||||
&self,
|
||||
item_id: i64,
|
||||
allow_binary: bool,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
) -> Result<
|
||||
(
|
||||
std::pin::Pin<
|
||||
Box<
|
||||
dyn tokio_stream::Stream<
|
||||
Item = Result<tokio_util::bytes::Bytes, std::io::Error>,
|
||||
> + Send,
|
||||
>,
|
||||
>,
|
||||
String,
|
||||
),
|
||||
CoreError,
|
||||
> {
|
||||
let content = self
|
||||
.execute_blocking(move |conn, item_service| {
|
||||
let item_with_content = item_service.get_item_content(conn, item_id)?;
|
||||
Ok::<_, CoreError>(item_with_content.content)
|
||||
})
|
||||
.await?;
|
||||
|
||||
// Clone content for use in the binary check closure
|
||||
let content_clone = content.clone();
|
||||
|
||||
// Get metadata to determine MIME type and binary status
|
||||
let (mime_type, is_binary) = {
|
||||
let db = self.db.clone();
|
||||
let item_service = self.item_service.clone();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let conn = db.blocking_lock();
|
||||
let item_with_meta = item_service.get_item(&conn, item_id)?;
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
|
||||
let mime_type = metadata
|
||||
.get("mime_type")
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| "application/octet-stream".to_string());
|
||||
|
||||
let is_binary = if let Some(text_val) = metadata.get("text") {
|
||||
text_val == "false"
|
||||
} else {
|
||||
crate::common::is_binary::is_binary(&content_clone)
|
||||
};
|
||||
|
||||
Ok::<_, CoreError>((mime_type, is_binary))
|
||||
})
|
||||
.await
|
||||
.unwrap()?
|
||||
};
|
||||
|
||||
// Check if content is binary when allow_binary is false
|
||||
if !allow_binary && is_binary {
|
||||
return Err(CoreError::InvalidInput(
|
||||
"Binary content not allowed".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Create a stream that reads only the requested portion
|
||||
let content_len = content.len() as u64;
|
||||
|
||||
// Apply offset and length constraints
|
||||
let start = std::cmp::min(offset, content_len);
|
||||
let end = if length > 0 {
|
||||
std::cmp::min(start + length, content_len)
|
||||
} else {
|
||||
content_len
|
||||
};
|
||||
|
||||
let stream = if start < content_len {
|
||||
let chunk =
|
||||
tokio_util::bytes::Bytes::from(content[start as usize..end as usize].to_vec());
|
||||
Box::pin(tokio_stream::iter(vec![Ok(chunk)]))
|
||||
} else {
|
||||
Box::pin(tokio_stream::iter(vec![]))
|
||||
};
|
||||
|
||||
Ok((stream, mime_type))
|
||||
}
|
||||
|
||||
pub async fn stream_item_content_by_id_with_metadata(
|
||||
&self,
|
||||
item_id: i64,
|
||||
metadata: &HashMap<String, String>,
|
||||
allow_binary: bool,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
filter: Option<String>,
|
||||
) -> Result<
|
||||
(
|
||||
std::pin::Pin<
|
||||
Box<
|
||||
dyn tokio_stream::Stream<
|
||||
Item = Result<tokio_util::bytes::Bytes, std::io::Error>,
|
||||
> + Send,
|
||||
>,
|
||||
>,
|
||||
String,
|
||||
),
|
||||
CoreError,
|
||||
> {
|
||||
// Use provided metadata to determine MIME type and binary status
|
||||
let mime_type = metadata
|
||||
.get("mime_type")
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| "application/octet-stream".to_string());
|
||||
|
||||
// Check if content is binary when allow_binary is false
|
||||
if !allow_binary {
|
||||
let is_binary = if let Some(text_val) = metadata.get("text") {
|
||||
text_val == "false"
|
||||
} else {
|
||||
// Get binary status using streaming approach
|
||||
let (_, _, is_binary) = self.get_item_content_info_streaming(item_id, None).await?;
|
||||
is_binary
|
||||
};
|
||||
|
||||
if is_binary {
|
||||
return Err(CoreError::InvalidInput(
|
||||
"Binary content not allowed".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Get a streaming reader for the content with filtering applied
|
||||
let reader = {
|
||||
let db = self.db.clone();
|
||||
let item_service = self.item_service.clone();
|
||||
let item_id = item_id;
|
||||
let filter = filter.clone();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let conn = db.blocking_lock();
|
||||
item_service
|
||||
.get_item_content_info_streaming(&conn, item_id, filter)
|
||||
.map(|(reader, _, _)| reader)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| CoreError::Other(anyhow::anyhow!("Blocking task failed: {}", e)))?
|
||||
};
|
||||
|
||||
// Convert the reader into an async stream manually
|
||||
use tokio_util::bytes::Bytes;
|
||||
|
||||
// Create a channel to stream data between the blocking thread and async runtime
|
||||
let (tx, rx) = tokio::sync::mpsc::channel(1);
|
||||
|
||||
// Spawn a blocking task to read from the reader and send chunks
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut reader = reader;
|
||||
// Apply offset by reading and discarding bytes
|
||||
if offset > 0 {
|
||||
let mut remaining = offset;
|
||||
let mut buf = [0; PIPESIZE];
|
||||
while remaining > 0 {
|
||||
let to_read = std::cmp::min(remaining, buf.len() as u64);
|
||||
match reader.as_mut().unwrap().read(&mut buf[..to_read as usize]) {
|
||||
Ok(0) => break, // EOF reached before offset
|
||||
Ok(n) => remaining -= n as u64,
|
||||
Err(e) => {
|
||||
let _ = tx.blocking_send(Err(e));
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Read and send data up to the specified length
|
||||
let mut remaining_length = length;
|
||||
let mut buffer = [0; PIPESIZE];
|
||||
|
||||
loop {
|
||||
// Determine how much to read in this iteration
|
||||
let to_read = if length > 0 {
|
||||
// If length is specified, don't read more than remaining_length
|
||||
std::cmp::min(remaining_length, buffer.len() as u64) as usize
|
||||
} else {
|
||||
buffer.len()
|
||||
};
|
||||
|
||||
if to_read == 0 {
|
||||
break; // We've read the requested length
|
||||
}
|
||||
|
||||
match reader.as_mut().unwrap().read(&mut buffer[..to_read]) {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(n) => {
|
||||
let chunk = Bytes::copy_from_slice(&buffer[..n]);
|
||||
// Block on sending to the channel
|
||||
if tx.blocking_send(Ok(chunk)).is_err() {
|
||||
break; // Receiver dropped
|
||||
}
|
||||
if length > 0 {
|
||||
remaining_length -= n as u64;
|
||||
if remaining_length == 0 {
|
||||
break; // Reached the requested length
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let _ = tx.blocking_send(Err(e));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Convert the receiver into a stream
|
||||
let stream = tokio_stream::wrappers::ReceiverStream::new(rx);
|
||||
|
||||
Ok((Box::pin(stream), mime_type))
|
||||
}
|
||||
|
||||
pub async fn get_item_content_info_streaming(
|
||||
&self,
|
||||
item_id: i64,
|
||||
filter: Option<String>,
|
||||
) -> Result<(Box<dyn Read + Send>, String, bool), CoreError> {
|
||||
self.execute_blocking(move |conn, item_service| {
|
||||
item_service.get_item_content_info_streaming(conn, item_id, filter)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn find_item(
|
||||
&self,
|
||||
ids: Vec<i64>,
|
||||
tags: Vec<String>,
|
||||
meta: HashMap<String, String>,
|
||||
) -> Result<ItemWithMeta, CoreError> {
|
||||
let ids_clone = ids.clone();
|
||||
let tags_clone = tags.clone();
|
||||
let meta_clone = meta.clone();
|
||||
self.execute_blocking(move |conn, item_service| {
|
||||
item_service.find_item(conn, &ids_clone, &tags_clone, &meta_clone)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn list_items(
|
||||
&self,
|
||||
tags: Vec<String>,
|
||||
meta: HashMap<String, String>,
|
||||
) -> Result<Vec<ItemWithMeta>, CoreError> {
|
||||
let tags_clone = tags.clone();
|
||||
let meta_clone = meta.clone();
|
||||
self.execute_blocking(move |conn, item_service| {
|
||||
item_service.list_items(conn, &tags_clone, &meta_clone)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_item(&self, id: i64) -> Result<(), CoreError> {
|
||||
let db = self.db.clone();
|
||||
let item_service = self.item_service.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut conn = db.blocking_lock();
|
||||
item_service.delete_item(&mut conn, id)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn save_item_from_mcp(
|
||||
&self,
|
||||
content: Vec<u8>,
|
||||
tags: Vec<String>,
|
||||
metadata: HashMap<String, String>,
|
||||
) -> Result<ItemWithMeta, CoreError> {
|
||||
let db = self.db.clone();
|
||||
let item_service = self.item_service.clone();
|
||||
let cmd = self.cmd.clone();
|
||||
let settings = self.settings.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut conn = db.blocking_lock();
|
||||
let mut cmd = cmd.blocking_lock();
|
||||
let settings = settings.as_ref();
|
||||
item_service
|
||||
.save_item_from_mcp(&content, &tags, &metadata, &mut cmd, settings, &mut conn)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
146
src/services/compression_service.rs
Normal file
146
src/services/compression_service.rs
Normal file
@@ -0,0 +1,146 @@
|
||||
use crate::compression_engine::{CompressionType, get_compression_engine};
|
||||
use crate::services::error::CoreError;
|
||||
use anyhow::anyhow;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
pub struct CompressionService;
|
||||
|
||||
/// Service for handling compression and decompression of item content.
|
||||
///
|
||||
/// Provides methods to read compressed item files either fully into memory
|
||||
/// or as streaming readers. Supports various compression types via engines.
|
||||
/// This service abstracts the underlying compression engines for consistent access.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let service = CompressionService::new();
|
||||
/// let content = service.get_item_content(path, "gzip")?;
|
||||
/// ```
|
||||
/// Provides methods to read compressed item files either fully into memory
|
||||
/// or as streaming readers. Supports various compression types via engines.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let service = CompressionService::new();
|
||||
/// let content = service.get_item_content(path, "gzip")?;
|
||||
/// ```
|
||||
impl CompressionService {
|
||||
/// Creates a new CompressionService instance.
|
||||
///
|
||||
/// This is a simple constructor; no initialization is required beyond the static methods.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `CompressionService` - A new instance of the service.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let service = CompressionService::new();
|
||||
/// ```
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
/// Reads and decompresses the full content of an item file into memory.
|
||||
///
|
||||
/// Loads the entire decompressed content as a byte vector. Suitable for small to medium files.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_path` - Path to the compressed item file on disk.
|
||||
/// * `compression` - Compression type as string (e.g., "gzip", "lz4"); case-insensitive.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Ok(Vec<u8>)` - The full decompressed content bytes.
|
||||
/// * `Err(CoreError)` - On failure (see errors).
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::Compression(String)` - If the compression type string is invalid.
|
||||
/// * `CoreError::Other(anyhow::Error)` - If the file cannot be opened, the engine fails, or reading encounters an I/O error.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let content = service.get_item_content(item_path, "lz4")?;
|
||||
/// assert_eq!(content.len(), expected_size);
|
||||
/// ```
|
||||
pub fn get_item_content(
|
||||
&self,
|
||||
item_path: PathBuf,
|
||||
compression: &str,
|
||||
) -> Result<Vec<u8>, CoreError> {
|
||||
let compression_type = CompressionType::from_str(compression)
|
||||
.map_err(|e| CoreError::Compression(e.to_string()))?;
|
||||
let engine = get_compression_engine(compression_type)
|
||||
.map_err(|e| CoreError::Other(anyhow!(e.to_string())))?;
|
||||
|
||||
let mut reader = engine.open(item_path.clone()).map_err(|e| {
|
||||
CoreError::Other(anyhow!("Failed to open item file {:?}: {}", item_path, e))
|
||||
})?;
|
||||
let mut content = Vec::new();
|
||||
reader.read_to_end(&mut content)?;
|
||||
Ok(content)
|
||||
}
|
||||
|
||||
/// Opens a streaming reader for decompressing item content.
|
||||
///
|
||||
/// Due to Send requirements in async contexts, this loads the full content into a Cursor.
|
||||
/// Warning: For very large files, this consumes significant memory; consider alternatives for streaming without loading all data.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_path` - Path to the compressed item file on disk.
|
||||
/// * `compression` - Compression type as string (e.g., "gzip", "lz4"); case-insensitive.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Ok(Box<dyn Read + Send>)` - A boxed reader that can be used for streaming decompressed data.
|
||||
/// * `Err(CoreError)` - On failure (see errors).
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::Compression(String)` - If the compression type string is invalid.
|
||||
/// * `CoreError::Other(anyhow::Error)` - If the file cannot be opened, the engine fails, or reading encounters an I/O error.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut reader = service.stream_item_content(item_path, "gzip")?;
|
||||
/// let mut buf = [0; 1024];
|
||||
/// let n = reader.read(&mut buf)?;
|
||||
/// ```
|
||||
pub fn stream_item_content(
|
||||
&self,
|
||||
item_path: PathBuf,
|
||||
compression: &str,
|
||||
) -> Result<Box<dyn Read + Send>, CoreError> {
|
||||
let compression_type = CompressionType::from_str(compression)
|
||||
.map_err(|e| CoreError::Compression(e.to_string()))?;
|
||||
let engine = get_compression_engine(compression_type)
|
||||
.map_err(|e| CoreError::Other(anyhow!(e.to_string())))?;
|
||||
|
||||
let reader = engine.open(item_path.clone()).map_err(|e| {
|
||||
CoreError::Other(anyhow!("Failed to open item file {:?}: {}", item_path, e))
|
||||
})?;
|
||||
// Since we can't guarantee the reader implements Send, we need to wrap it
|
||||
// We'll read the content into a buffer and return a Cursor which is Send
|
||||
// This is not ideal for large files, but it ensures Send is implemented
|
||||
let mut content = Vec::new();
|
||||
let mut temp_reader = reader;
|
||||
temp_reader.read_to_end(&mut content)?;
|
||||
Ok(Box::new(std::io::Cursor::new(content)))
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for CompressionService {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
44
src/services/error.rs
Normal file
44
src/services/error.rs
Normal file
@@ -0,0 +1,44 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Core error types used across services for consistent error handling.
|
||||
///
|
||||
/// This enum centralizes errors from database, I/O, validation, and other operations.
|
||||
/// It implements Error and Debug for propagation and logging. Use this for all service-level errors.
|
||||
///
|
||||
/// # Variants
|
||||
///
|
||||
/// * `Database(rusqlite::Error)` - Wraps SQLite errors from queries or transactions.
|
||||
/// * `Io(std::io::Error)` - Wraps I/O errors from file operations or streams.
|
||||
/// * `ItemNotFound(i64)` - Specific item not found by ID.
|
||||
/// * `ItemNotFoundGeneric` - Generic item not found (no ID specified).
|
||||
/// * `InvalidInput(String)` - User or config input validation failure with message.
|
||||
/// * `Compression(String)` - Compression/decompression errors with details.
|
||||
/// * `Other(anyhow::Error)` - Catch-all for other anyhow-wrapped errors.
|
||||
/// * `Migration(rusqlite_migration::Error)` - Database migration failures.
|
||||
#[derive(Error, Debug)]
|
||||
pub enum CoreError {
|
||||
#[error("Database error: {0}")]
|
||||
/// Database operation failed.
|
||||
Database(#[from] rusqlite::Error),
|
||||
#[error("I/O error: {0}")]
|
||||
/// File or stream I/O operation failed.
|
||||
Io(#[from] std::io::Error),
|
||||
#[error("Item not found with id {0}")]
|
||||
/// Item with the specified ID does not exist in the database.
|
||||
ItemNotFound(i64),
|
||||
#[error("Item not found")]
|
||||
/// Item does not exist (no specific ID).
|
||||
ItemNotFoundGeneric,
|
||||
#[error("Invalid input: {0}")]
|
||||
/// Input validation failed.
|
||||
InvalidInput(String),
|
||||
#[error("Compression error: {0}")]
|
||||
/// Compression or decompression operation failed.
|
||||
Compression(String),
|
||||
#[error(transparent)]
|
||||
/// Other unexpected error.
|
||||
Other(#[from] anyhow::Error),
|
||||
#[error("Migration error: {0}")]
|
||||
/// Database schema migration failed.
|
||||
Migration(#[from] rusqlite_migration::Error),
|
||||
}
|
||||
218
src/services/filter_service.rs
Normal file
218
src/services/filter_service.rs
Normal file
@@ -0,0 +1,218 @@
|
||||
use crate::filter_plugin::{FilterChain, parse_filter_string};
|
||||
use once_cell::sync::Lazy;
|
||||
use std::collections::HashMap;
|
||||
use std::io::{Read, Result, Write};
|
||||
use std::sync::Mutex;
|
||||
|
||||
/// Zero-argument constructor producing a boxed filter plugin instance.
type FilterConstructor = fn() -> Box<dyn crate::filter_plugin::FilterPlugin>;
|
||||
|
||||
/// Service for managing filter chains and plugin registration.
///
/// The `FilterService` provides functionality to parse filter strings, create filter chains,
/// and apply them to input/output streams. It integrates with the global filter plugin
/// registry to support dynamic loading of filter implementations like `head`, `tail`,
/// `grep`, and custom plugins.
///
/// # Usage
///
/// ```rust
/// let service = FilterService::new();
/// let mut chain = service.create_filter_chain(Some("head_lines(10)")).unwrap();
/// service.filter_data(&mut chain, &mut reader, &mut writer)?;
/// ```
pub struct FilterService;
|
||||
|
||||
impl Default for FilterService {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl FilterService {
|
||||
/// Creates a new `FilterService` instance.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `FilterService`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let service = FilterService::new();
|
||||
/// ```
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
/// Creates a filter chain from a filter string specification.
|
||||
///
|
||||
/// Parses the filter string using the filter parser and constructs a `FilterChain`
|
||||
/// with the appropriate plugins. Returns `None` if no filter string is provided.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `filter_str` - Optional filter string, e.g., "head_lines(10),grep(pattern=error)".
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<Option<FilterChain>, io::Error>` - The parsed chain or an error if parsing fails.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `io::Error` - If the filter string is invalid or parsing fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let chain = service.create_filter_chain(Some("head_lines(10)"))?;
|
||||
/// assert!(chain.is_some());
|
||||
/// let empty = service.create_filter_chain(None)?;
|
||||
/// assert!(empty.is_none());
|
||||
/// ```
|
||||
pub fn create_filter_chain(&self, filter_str: Option<&str>) -> Result<Option<FilterChain>> {
|
||||
if let Some(filter_str) = filter_str {
|
||||
parse_filter_string(filter_str).map(Some)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Applies a filter chain to input data and writes to output.
|
||||
///
|
||||
/// If a filter chain is provided, it processes the data through each filter in sequence.
|
||||
/// If no chain is provided, it copies the input directly to the output.
|
||||
///
|
||||
/// # Type Parameters
|
||||
///
|
||||
/// * `R` - Type implementing `Read` for the input source.
|
||||
/// * `W` - Type implementing `Write` for the output destination.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `chain` - Mutable reference to an optional filter chain.
|
||||
/// * `reader` - Mutable reference to the input reader.
|
||||
/// * `writer` - Mutable reference to the output writer.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(), io::Error>` - Success or I/O error if filtering fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut chain = parse_filter_string("head_lines(5)")?;
|
||||
/// service.filter_data(&mut chain, &mut reader, &mut writer)?;
|
||||
/// ```
|
||||
pub fn filter_data<R: Read, W: Write>(
|
||||
&self,
|
||||
chain: &mut Option<FilterChain>,
|
||||
reader: &mut R,
|
||||
writer: &mut W,
|
||||
) -> Result<()> {
|
||||
if let Some(chain) = chain {
|
||||
chain.filter(reader, writer)
|
||||
} else {
|
||||
// If no filter chain, just copy the input to output
|
||||
std::io::copy(reader, writer)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Convenience method to apply filters to in-memory data and return the result.
|
||||
///
|
||||
/// Parses the filter string, applies the chain to the data via Cursor I/O,
|
||||
/// and collects output into a Vec<u8>. Ideal for non-streaming use cases.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `data` - Input bytes to filter.
|
||||
/// * `filter_str` - Optional filter string specification.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Ok(Vec<u8>)` - Filtered output bytes.
|
||||
/// * `Err(io::Error)` - If chain creation or filtering fails.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Propagates parsing or I/O errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let filtered = service.process_with_filter(b"Hello\nWorld\n", Some("head_lines(1)"))?;
|
||||
/// assert_eq!(filtered, b"Hello\n");
|
||||
/// ```
|
||||
pub fn process_with_filter(&self, data: &[u8], filter_str: Option<&str>) -> Result<Vec<u8>> {
|
||||
let mut chain = self.create_filter_chain(filter_str)?;
|
||||
let mut reader = std::io::Cursor::new(data);
|
||||
let mut writer = Vec::new();
|
||||
|
||||
if let Some(ref mut chain) = chain {
|
||||
chain.filter(&mut reader, &mut writer)?;
|
||||
} else {
|
||||
std::io::copy(&mut reader, &mut writer)?;
|
||||
}
|
||||
|
||||
Ok(writer)
|
||||
}
|
||||
}
|
||||
|
||||
/// Global thread-safe registry for filter plugins.
///
/// Lazily initialized `Mutex<HashMap>` mapping plugin names (e.g., "head_bytes") to their
/// constructors. Plugins self-register at module load time via `register_filter_plugin`.
/// Used by the parser to instantiate filters dynamically by name.
///
/// # Panics
///
/// Accessors panic if the mutex is poisoned (a thread panicked while holding the lock).
static FILTER_PLUGIN_REGISTRY: Lazy<Mutex<HashMap<String, FilterConstructor>>> =
    Lazy::new(|| Mutex::new(HashMap::new()));
|
||||
|
||||
/// Registers a filter plugin in the global registry.
|
||||
///
|
||||
/// Called by plugin modules at initialization to enable dynamic loading by name.
|
||||
/// Supports plugin discovery and instantiation during filter chain parsing.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `name` - Unique string identifier for the plugin (e.g., "head_lines").
|
||||
/// * `constructor` - Zero-arg function returning a new boxed `FilterPlugin` instance.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if Mutex lock fails (unlikely).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// register_filter_plugin("custom_filter", || Box::new(CustomFilter::default()));
|
||||
/// ```
|
||||
pub fn register_filter_plugin(name: &str, constructor: FilterConstructor) {
|
||||
FILTER_PLUGIN_REGISTRY
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert(name.to_string(), constructor);
|
||||
}
|
||||
|
||||
/// Retrieves a snapshot of all registered filter plugins.
|
||||
///
|
||||
/// Clones the registry for safe iteration. Useful for status reporting, validation, or UI display of available filters.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// HashMap clone with plugin names as keys and constructors as values.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if Mutex lock fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// let plugins = get_available_filter_plugins();
|
||||
/// assert!(plugins.contains_key("head_bytes"));
|
||||
/// ```
|
||||
pub fn get_available_filter_plugins() -> HashMap<String, FilterConstructor> {
|
||||
FILTER_PLUGIN_REGISTRY.lock().unwrap().clone()
|
||||
}
|
||||
931
src/services/item_service.rs
Normal file
931
src/services/item_service.rs
Normal file
@@ -0,0 +1,931 @@
|
||||
use crate::common::PIPESIZE;
|
||||
use crate::compression_engine::{CompressionType, get_compression_engine};
|
||||
use crate::config::Settings;
|
||||
use crate::db::{self, Meta};
|
||||
use crate::filter_plugin;
|
||||
use crate::modes::common::settings_compression_type;
|
||||
use crate::services::compression_service::CompressionService;
|
||||
use crate::services::error::CoreError;
|
||||
use crate::services::filter_service::FilterService;
|
||||
use crate::services::meta_service::MetaService;
|
||||
use crate::services::types::{ItemWithContent, ItemWithMeta};
|
||||
use clap::Command;
|
||||
use log::debug;
|
||||
use rusqlite::Connection;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::io::{IsTerminal, Read, Write};
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Service for managing items in the Keep application.
///
/// This service handles CRUD operations for items, including saving content,
/// retrieving items with metadata and content, applying filters, and managing
/// compression. It integrates with the database, file system, compression engines,
/// metadata plugins, and filters to provide a complete item management interface.
pub struct ItemService {
    /// Path to the data storage directory; item files live here, named by item ID.
    data_path: PathBuf,
    /// Service for handling compression and decompression.
    compression_service: CompressionService,
    /// Service for managing metadata plugins.
    meta_service: MetaService,
    /// Service for applying content filters.
    filter_service: FilterService,
}
|
||||
|
||||
impl ItemService {
|
||||
/// Creates a new `ItemService` instance.
|
||||
///
|
||||
/// Initializes the service with the specified data directory path.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `data_path` - Path to the directory where item files are stored.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A new `ItemService` instance.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let service = ItemService::new(PathBuf::from("/data"));
|
||||
/// ```
|
||||
pub fn new(data_path: PathBuf) -> Self {
|
||||
debug!(
|
||||
"ITEM_SERVICE: Creating new ItemService with data_path: {:?}",
|
||||
data_path
|
||||
);
|
||||
Self {
|
||||
data_path,
|
||||
compression_service: CompressionService::new(),
|
||||
meta_service: MetaService::new(),
|
||||
filter_service: FilterService::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves an item with its associated metadata and tags.
|
||||
///
|
||||
/// Fetches the item from the database by ID and loads its tags and metadata.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `id` - Item ID to retrieve.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<ItemWithMeta, CoreError>` - Item with metadata and tags, or an error if not found.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::ItemNotFound(id)` - If the item does not exist.
|
||||
/// * Database-related errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let item_with_meta = item_service.get_item(&conn, 1)?;
|
||||
/// assert_eq!(item_with_meta.item.id, Some(1));
|
||||
/// ```
|
||||
pub fn get_item(&self, conn: &Connection, id: i64) -> Result<ItemWithMeta, CoreError> {
|
||||
debug!("ITEM_SERVICE: Getting item with id: {}", id);
|
||||
let item = db::get_item(conn, id)?.ok_or(CoreError::ItemNotFound(id))?;
|
||||
debug!("ITEM_SERVICE: Found item: {:?}", item);
|
||||
let tags = db::get_item_tags(conn, &item)?;
|
||||
debug!("ITEM_SERVICE: Found {} tags for item {}", tags.len(), id);
|
||||
let meta = db::get_item_meta(conn, &item)?;
|
||||
debug!(
|
||||
"ITEM_SERVICE: Found {} meta entries for item {}",
|
||||
meta.len(),
|
||||
id
|
||||
);
|
||||
Ok(ItemWithMeta { item, tags, meta })
|
||||
}
|
||||
|
||||
/// Retrieves an item with its content, metadata, and tags.
|
||||
///
|
||||
/// Loads the item, its metadata/tags, and decompresses the full content.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `id` - Item ID to retrieve.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<ItemWithContent, CoreError>` - Item with content, or an error if not found.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::ItemNotFound(id)` - If the item does not exist.
|
||||
/// * `CoreError::Io(...)` - If file read or decompression fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let item_with_content = item_service.get_item_content(&conn, 1)?;
|
||||
/// assert!(!item_with_content.content.is_empty());
|
||||
/// ```
|
||||
pub fn get_item_content(
|
||||
&self,
|
||||
conn: &Connection,
|
||||
id: i64,
|
||||
) -> Result<ItemWithContent, CoreError> {
|
||||
debug!("ITEM_SERVICE: Getting item content for id: {}", id);
|
||||
let item_with_meta = self.get_item(conn, id)?;
|
||||
let item_id = item_with_meta
|
||||
.item
|
||||
.id
|
||||
.ok_or_else(|| CoreError::InvalidInput("Item missing ID".to_string()))?;
|
||||
|
||||
if item_id <= 0 {
|
||||
return Err(CoreError::InvalidInput(format!(
|
||||
"Invalid item ID: {}",
|
||||
item_id
|
||||
)));
|
||||
}
|
||||
|
||||
let mut item_path = self.data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
debug!("ITEM_SERVICE: Reading content from path: {:?}", item_path);
|
||||
|
||||
let content = self
|
||||
.compression_service
|
||||
.get_item_content(item_path, &item_with_meta.item.compression)?;
|
||||
debug!(
|
||||
"ITEM_SERVICE: Read {} bytes of content for item {}",
|
||||
content.len(),
|
||||
id
|
||||
);
|
||||
|
||||
Ok(ItemWithContent {
|
||||
item_with_meta,
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
/// Retrieves item content with binary detection and optional filtering.
|
||||
///
|
||||
/// Loads content, applies filters if specified, and determines MIME type and binary status.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `id` - Item ID.
|
||||
/// * `filter` - Optional filter string to apply to content.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(Vec<u8>, String, bool), CoreError>` - (content, MIME type, is_binary).
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::ItemNotFound(id)` - If item not found.
|
||||
/// * Filter or compression errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let (content, mime, is_binary) = item_service.get_item_content_info(&conn, 1, Some("head_lines(10)"))?;
|
||||
/// ```
|
||||
pub fn get_item_content_info(
|
||||
&self,
|
||||
conn: &Connection,
|
||||
id: i64,
|
||||
filter: Option<String>,
|
||||
) -> Result<(Vec<u8>, String, bool), CoreError> {
|
||||
// Use streaming approach to handle all filtering options consistently
|
||||
let (mut reader, mime_type, is_binary) =
|
||||
self.get_item_content_info_streaming(conn, id, filter)?;
|
||||
|
||||
// Read all the filtered content into a buffer
|
||||
let mut content = Vec::new();
|
||||
reader.read_to_end(&mut content)?;
|
||||
|
||||
Ok((content, mime_type, is_binary))
|
||||
}
|
||||
|
||||
/// Determines if item content is binary based on metadata or sampling.
|
||||
///
|
||||
/// Checks existing "text" metadata first; if absent, samples the first 8192 bytes.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `item_path` - Path to the item file.
|
||||
/// * `compression` - Compression type.
|
||||
/// * `metadata` - Item metadata.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<bool, CoreError>` - True if binary, false if text.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * File or compression errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let is_bin = item_service.is_content_binary(path, "gzip", &meta)?;
|
||||
/// ```
|
||||
fn is_content_binary(
|
||||
&self,
|
||||
item_path: PathBuf,
|
||||
compression: &str,
|
||||
metadata: &HashMap<String, String>,
|
||||
) -> Result<bool, CoreError> {
|
||||
// Check if we already have text metadata
|
||||
if let Some(text_val) = metadata.get("text") {
|
||||
return Ok(text_val == "false");
|
||||
}
|
||||
|
||||
// Read only the first 8192 bytes for binary detection
|
||||
let mut sample_reader = self
|
||||
.compression_service
|
||||
.stream_item_content(item_path, compression)?;
|
||||
let mut sample_buffer = vec![0; 8192];
|
||||
let bytes_read = sample_reader.read(&mut sample_buffer)?;
|
||||
Ok(crate::common::is_binary::is_binary(
|
||||
&sample_buffer[..bytes_read],
|
||||
))
|
||||
}
|
||||
|
||||
/// Retrieves a streaming reader for item content with optional filtering.
|
||||
///
|
||||
/// Returns a boxed reader that applies compression decompression and filters.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `id` - Item ID.
|
||||
/// * `filter` - Optional filter string.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(Box<dyn Read + Send>, String, bool), CoreError>` - (reader, MIME type, is_binary).
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::ItemNotFound(id)` - If item not found.
|
||||
/// * Filter parsing or compression errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let (reader, mime, is_bin) = item_service.get_item_content_info_streaming(&conn, 1, Some("grep(error)"))?;
|
||||
/// ```
|
||||
pub fn get_item_content_info_streaming(
|
||||
&self,
|
||||
conn: &Connection,
|
||||
id: i64,
|
||||
filter: Option<String>,
|
||||
) -> Result<(Box<dyn Read + Send>, String, bool), CoreError> {
|
||||
// Convert filter string to FilterChain if provided
|
||||
let filter_chain = if let Some(filter_str) = filter {
|
||||
self.filter_service
|
||||
.create_filter_chain(Some(&filter_str))
|
||||
.map_err(|e| {
|
||||
CoreError::InvalidInput(format!("Failed to create filter chain: {}", e))
|
||||
})?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
self.get_item_content_info_streaming_with_chain(conn, id, filter_chain.as_ref())
|
||||
}
|
||||
|
||||
/// Retrieves a streaming reader with a pre-built filter chain.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `id` - Item ID.
|
||||
/// * `filter_chain` - Optional pre-parsed filter chain.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(Box<dyn Read + Send>, String, bool), CoreError>` - (reader, MIME type, is_binary).
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::ItemNotFound(id)` - If item not found.
|
||||
/// * Compression or filtering errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let chain = parse_filter_string("head(100)")?;
|
||||
/// let (reader, mime, is_bin) = item_service.get_item_content_info_streaming_with_chain(&conn, 1, Some(&chain))?;
|
||||
/// ```
|
||||
pub fn get_item_content_info_streaming_with_chain(
|
||||
&self,
|
||||
conn: &Connection,
|
||||
id: i64,
|
||||
filter_chain: Option<&filter_plugin::FilterChain>,
|
||||
) -> Result<(Box<dyn Read + Send>, String, bool), CoreError> {
|
||||
let item_with_meta = self.get_item(conn, id)?;
|
||||
let item_id = item_with_meta
|
||||
.item
|
||||
.id
|
||||
.ok_or_else(|| CoreError::InvalidInput("Item missing ID".to_string()))?;
|
||||
|
||||
if item_id <= 0 {
|
||||
return Err(CoreError::InvalidInput(format!(
|
||||
"Invalid item ID: {}",
|
||||
item_id
|
||||
)));
|
||||
}
|
||||
|
||||
let mut item_path = self.data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
|
||||
let reader = self
|
||||
.compression_service
|
||||
.stream_item_content(item_path.clone(), &item_with_meta.item.compression)?;
|
||||
|
||||
// Wrap the reader with filtering
|
||||
let filtered_reader = Box::new(FilteringReader::new(reader, filter_chain.cloned()));
|
||||
|
||||
let metadata = item_with_meta.meta_as_map();
|
||||
let mime_type = metadata
|
||||
.get("mime_type")
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| "application/octet-stream".to_string());
|
||||
|
||||
// Check if content is binary
|
||||
let is_binary =
|
||||
self.is_content_binary(item_path, &item_with_meta.item.compression, &metadata)?;
|
||||
|
||||
Ok((filtered_reader, mime_type, is_binary))
|
||||
}
|
||||
|
||||
/// Finds an item by ID or tags/metadata criteria.
|
||||
///
|
||||
/// Supports lookup by ID, last item, or search by tags/metadata.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `ids` - Vector of IDs (if non-empty, uses first ID).
|
||||
/// * `tags` - Vector of tags (all must match if provided).
|
||||
/// * `meta` - HashMap of metadata key-value pairs (exact match).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<ItemWithMeta, CoreError>` - The found item or error.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::ItemNotFound(...)` - If no matching item.
|
||||
/// * Database errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let item = item_service.find_item(&conn, vec![1], &vec![], &HashMap::new())?;
|
||||
/// ```
|
||||
pub fn find_item(
|
||||
&self,
|
||||
conn: &Connection,
|
||||
ids: &[i64],
|
||||
tags: &[String],
|
||||
meta: &HashMap<String, String>,
|
||||
) -> Result<ItemWithMeta, CoreError> {
|
||||
debug!(
|
||||
"ITEM_SERVICE: Finding item with ids: {:?}, tags: {:?}, meta: {:?}",
|
||||
ids, tags, meta
|
||||
);
|
||||
let item_maybe = match (ids.is_empty(), tags.is_empty() && meta.is_empty()) {
|
||||
(false, _) => {
|
||||
debug!("ITEM_SERVICE: Finding by ID: {}", ids[0]);
|
||||
db::get_item(conn, ids[0])?
|
||||
}
|
||||
(true, true) => {
|
||||
debug!("ITEM_SERVICE: Finding last item");
|
||||
db::get_item_last(conn)?
|
||||
}
|
||||
(true, false) => {
|
||||
debug!("ITEM_SERVICE: Finding by tags/meta");
|
||||
db::get_item_matching(conn, &tags.to_vec(), meta)?
|
||||
}
|
||||
};
|
||||
|
||||
let item = item_maybe.ok_or(CoreError::ItemNotFoundGeneric)?;
|
||||
debug!("ITEM_SERVICE: Found matching item: {:?}", item);
|
||||
|
||||
// Get tags and meta directly instead of calling get_item which makes redundant queries
|
||||
let item_id = item
|
||||
.id
|
||||
.ok_or_else(|| CoreError::InvalidInput("Item missing ID".to_string()))?;
|
||||
let tags = db::get_item_tags(conn, &item)?;
|
||||
debug!(
|
||||
"ITEM_SERVICE: Found {} tags for item {}",
|
||||
tags.len(),
|
||||
item_id
|
||||
);
|
||||
let meta = db::get_item_meta(conn, &item)?;
|
||||
debug!(
|
||||
"ITEM_SERVICE: Found {} meta entries for item {}",
|
||||
meta.len(),
|
||||
item_id
|
||||
);
|
||||
|
||||
Ok(ItemWithMeta { item, tags, meta })
|
||||
}
|
||||
|
||||
/// Lists items matching tags and metadata criteria.
|
||||
///
|
||||
/// Filters by tags (all must match) and exact metadata values, then loads full details.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `tags` - Vector of tags (all must match).
|
||||
/// * `meta` - HashMap of metadata key-value pairs (exact match).
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<Vec<ItemWithMeta>, CoreError>` - List of matching items.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * Database query errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let items = item_service.list_items(&conn, &vec!["work"], &HashMap::new())?;
|
||||
/// ```
|
||||
pub fn list_items(
|
||||
&self,
|
||||
conn: &Connection,
|
||||
tags: &[String],
|
||||
meta: &HashMap<String, String>,
|
||||
) -> Result<Vec<ItemWithMeta>, CoreError> {
|
||||
debug!(
|
||||
"ITEM_SERVICE: Listing items with tags: {:?}, meta: {:?}",
|
||||
tags, meta
|
||||
);
|
||||
let items = db::get_items_matching(conn, &tags.to_vec(), meta)?;
|
||||
debug!("ITEM_SERVICE: Found {} matching items", items.len());
|
||||
|
||||
let item_ids: Vec<i64> = items.iter().filter_map(|item| item.id).collect();
|
||||
if item_ids.is_empty() {
|
||||
debug!("ITEM_SERVICE: No items found, returning empty list");
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
debug!(
|
||||
"ITEM_SERVICE: Getting tags and meta for {} items",
|
||||
item_ids.len()
|
||||
);
|
||||
let tags_map = db::get_tags_for_items(conn, &item_ids)?;
|
||||
let meta_map_db = db::get_meta_for_items(conn, &item_ids)?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for item in items {
|
||||
let item_id = item.id.unwrap();
|
||||
let tags = tags_map.get(&item_id).cloned().unwrap_or_default();
|
||||
let meta_hm = meta_map_db.get(&item_id).cloned().unwrap_or_default();
|
||||
let meta = meta_hm
|
||||
.into_iter()
|
||||
.map(|(name, value)| Meta {
|
||||
id: item_id,
|
||||
name,
|
||||
value,
|
||||
})
|
||||
.collect();
|
||||
|
||||
result.push(ItemWithMeta { item, tags, meta });
|
||||
}
|
||||
|
||||
debug!(
|
||||
"ITEM_SERVICE: Returning {} items with full metadata",
|
||||
result.len()
|
||||
);
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Deletes an item by ID from database and storage.
|
||||
///
|
||||
/// Removes the item row, associated tags/metadata, and the physical file.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Mutable database connection.
|
||||
/// * `id` - Item ID to delete.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<(), CoreError>` - Success or error.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::ItemNotFound(id)` - If item does not exist.
|
||||
/// * File deletion errors (non-fatal if not found).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// item_service.delete_item(&mut conn, 1)?;
|
||||
/// ```
|
||||
pub fn delete_item(&self, conn: &mut Connection, id: i64) -> Result<(), CoreError> {
|
||||
debug!("ITEM_SERVICE: Deleting item with id: {}", id);
|
||||
if id <= 0 {
|
||||
return Err(CoreError::InvalidInput(format!("Invalid item ID: {}", id)));
|
||||
}
|
||||
let item = db::get_item(conn, id)?.ok_or(CoreError::ItemNotFound(id))?;
|
||||
debug!("ITEM_SERVICE: Found item to delete: {:?}", item);
|
||||
|
||||
let mut item_path = self.data_path.clone();
|
||||
item_path.push(id.to_string());
|
||||
debug!("ITEM_SERVICE: Deleting file at path: {:?}", item_path);
|
||||
|
||||
db::delete_item(conn, item)?;
|
||||
fs::remove_file(&item_path).or_else(|e| {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
})?;
|
||||
debug!("ITEM_SERVICE: Successfully deleted item {}", id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Saves content from a reader to a new item.
|
||||
///
|
||||
/// Reads from the input reader (e.g., stdin), applies metadata plugins,
|
||||
/// compresses the content, and stores it with tags. Echoes input to stdout via TeeReader.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `input` - Reader providing the content to save (e.g., stdin).
|
||||
/// * `cmd` - Mutable Clap command for error handling.
|
||||
/// * `settings` - Application settings.
|
||||
/// * `tags` - Tags to associate (defaults to "none" if empty).
|
||||
/// * `conn` - Mutable database connection.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<i64, CoreError>` - The ID of the new item.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::InvalidInput(...)` - If validation fails.
|
||||
/// * Database or file I/O errors.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let reader = std::io::stdin();
|
||||
/// let id = item_service.save_item(reader, &mut cmd, &settings, &mut vec![], &mut conn)?;
|
||||
/// ```
|
||||
pub fn save_item<R: Read>(
|
||||
&self,
|
||||
mut input: R,
|
||||
cmd: &mut Command,
|
||||
settings: &Settings,
|
||||
tags: &mut Vec<String>,
|
||||
conn: &mut Connection,
|
||||
) -> Result<i64, CoreError> {
|
||||
debug!("ITEM_SERVICE: Starting save_item with tags: {:?}", tags);
|
||||
if tags.is_empty() {
|
||||
tags.push("none".to_string());
|
||||
debug!("ITEM_SERVICE: No tags provided, using default 'none' tag");
|
||||
}
|
||||
|
||||
let compression_type = settings_compression_type(cmd, settings);
|
||||
debug!(
|
||||
"ITEM_SERVICE: Using compression type: {:?}",
|
||||
compression_type
|
||||
);
|
||||
let compression_engine = get_compression_engine(compression_type.clone())?;
|
||||
|
||||
let item_id;
|
||||
let mut item;
|
||||
{
|
||||
item = db::create_item(conn, compression_type.clone())?;
|
||||
item_id = item.id.unwrap();
|
||||
debug!("ITEM_SERVICE: Created new item with id: {}", item_id);
|
||||
db::set_item_tags(conn, item.clone(), tags)?;
|
||||
debug!("ITEM_SERVICE: Set tags for item {}", item_id);
|
||||
let item_meta = self.meta_service.collect_initial_meta();
|
||||
debug!(
|
||||
"ITEM_SERVICE: Collected {} initial meta entries",
|
||||
item_meta.len()
|
||||
);
|
||||
for (k, v) in item_meta.iter() {
|
||||
db::add_meta(conn, item_id, k, v)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Print the "KEEP: New item" message before starting to read input
|
||||
if !settings.quiet {
|
||||
if std::io::stderr().is_terminal() {
|
||||
let mut t = term::stderr().unwrap();
|
||||
let _ = t.reset();
|
||||
let _ = t.attr(term::Attr::Bold);
|
||||
let _ = write!(t, "KEEP:");
|
||||
let _ = t.reset();
|
||||
let _ = write!(t, " New item ");
|
||||
let _ = t.attr(term::Attr::Bold);
|
||||
let _ = write!(t, "{item_id}");
|
||||
let _ = t.reset();
|
||||
let _ = write!(t, " tags: ");
|
||||
let _ = t.attr(term::Attr::Bold);
|
||||
let _ = write!(t, "{}", tags.join(" "));
|
||||
let _ = t.reset();
|
||||
let _ = writeln!(t);
|
||||
let _ = std::io::stderr().flush();
|
||||
} else {
|
||||
let mut t = std::io::stderr();
|
||||
let _ = writeln!(t, "KEEP: New item: {} tags: {:?}", item_id, tags);
|
||||
}
|
||||
}
|
||||
|
||||
let mut plugins = self.meta_service.get_plugins(cmd, settings);
|
||||
debug!("ITEM_SERVICE: Got {} meta plugins", plugins.len());
|
||||
self.meta_service
|
||||
.initialize_plugins(&mut plugins, conn, item_id);
|
||||
|
||||
let mut item_path = self.data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
debug!("ITEM_SERVICE: Writing item to path: {:?}", item_path);
|
||||
|
||||
let mut item_out = compression_engine.create(item_path.clone())?;
|
||||
|
||||
let mut buffer = [0; PIPESIZE];
|
||||
let mut total_bytes = 0;
|
||||
|
||||
debug!("ITEM_SERVICE: Starting to read and process input data");
|
||||
loop {
|
||||
let n = input.read(&mut buffer)?;
|
||||
if n == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
total_bytes += n as i64;
|
||||
item_out.write_all(&buffer[..n])?;
|
||||
self.meta_service
|
||||
.process_chunk(&mut plugins, &buffer[..n], conn, item_id);
|
||||
}
|
||||
debug!("ITEM_SERVICE: Processed {} bytes total", total_bytes);
|
||||
|
||||
item_out.flush()?;
|
||||
drop(item_out);
|
||||
|
||||
debug!("ITEM_SERVICE: Finalizing meta plugins");
|
||||
self.meta_service
|
||||
.finalize_plugins(&mut plugins, conn, item_id);
|
||||
|
||||
item.size = Some(total_bytes);
|
||||
db::update_item(conn, item.clone())?;
|
||||
|
||||
debug!("ITEM_SERVICE: Save completed successfully");
|
||||
|
||||
Ok(item_id)
|
||||
}
|
||||
|
||||
/// Saves pre-loaded content as a new item, typically from MCP (Machine-Common-Processing) sources.
|
||||
///
|
||||
/// Bypasses streaming read, directly writes content and applies metadata/plugins.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `content` - Byte slice of content to save.
|
||||
/// * `tags` - Tags to associate.
|
||||
/// * `metadata` - Initial metadata key-value pairs.
|
||||
/// * `cmd` - Mutable command.
|
||||
/// * `settings` - Settings.
|
||||
/// * `conn` - Mutable database connection.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `Result<ItemWithMeta, CoreError>` - The saved item with full details.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `CoreError::Database(...)` - If DB insert fails.
|
||||
/// * `CoreError::Io(...)` - If file write fails.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let content = b"Hello, world!";
|
||||
/// let tags = vec!["mcp".to_string()];
|
||||
/// let meta = HashMap::from([("source".to_string(), "api".to_string())]);
|
||||
/// let item = service.save_item_from_mcp(content, &tags, &meta, &mut cmd, &settings, &mut conn)?;
|
||||
/// ```
|
||||
pub fn save_item_from_mcp(
|
||||
&self,
|
||||
content: &[u8],
|
||||
tags: &Vec<String>,
|
||||
metadata: &HashMap<String, String>,
|
||||
cmd: &mut Command,
|
||||
settings: &Settings,
|
||||
conn: &mut Connection,
|
||||
) -> Result<ItemWithMeta, CoreError> {
|
||||
debug!(
|
||||
"ITEM_SERVICE: Starting save_item_from_mcp with {} bytes, {} tags, {} metadata entries",
|
||||
content.len(),
|
||||
tags.len(),
|
||||
metadata.len()
|
||||
);
|
||||
let compression_type = CompressionType::LZ4;
|
||||
let compression_engine = get_compression_engine(compression_type.clone())?;
|
||||
|
||||
let item_id;
|
||||
let mut item;
|
||||
|
||||
{
|
||||
item = db::create_item(conn, compression_type.clone())?;
|
||||
item_id = item.id.unwrap();
|
||||
debug!("ITEM_SERVICE: Created MCP item with id: {}", item_id);
|
||||
|
||||
// Add tags
|
||||
for tag in tags {
|
||||
db::add_tag(conn, item_id, tag)?;
|
||||
}
|
||||
debug!("ITEM_SERVICE: Added {} tags to MCP item", tags.len());
|
||||
|
||||
// Add custom metadata
|
||||
for (key, value) in metadata {
|
||||
db::add_meta(conn, item_id, key, value)?;
|
||||
}
|
||||
debug!(
|
||||
"ITEM_SERVICE: Added {} custom metadata entries to MCP item",
|
||||
metadata.len()
|
||||
);
|
||||
}
|
||||
|
||||
let mut item_path = self.data_path.clone();
|
||||
item_path.push(item_id.to_string());
|
||||
debug!("ITEM_SERVICE: Writing MCP item to path: {:?}", item_path);
|
||||
|
||||
let mut writer = compression_engine.create(item_path.clone())?;
|
||||
writer.write_all(content)?;
|
||||
drop(writer);
|
||||
|
||||
let mut plugins = self.meta_service.get_plugins(cmd, settings);
|
||||
debug!(
|
||||
"ITEM_SERVICE: Got {} configured meta plugins for MCP item",
|
||||
plugins.len()
|
||||
);
|
||||
|
||||
self.meta_service
|
||||
.initialize_plugins(&mut plugins, conn, item_id);
|
||||
self.meta_service
|
||||
.process_chunk(&mut plugins, content, conn, item_id);
|
||||
self.meta_service
|
||||
.finalize_plugins(&mut plugins, conn, item_id);
|
||||
debug!("ITEM_SERVICE: Processed MCP item through configured meta plugins");
|
||||
|
||||
item.size = Some(content.len() as i64);
|
||||
db::update_item(conn, item.clone())?;
|
||||
|
||||
debug!("ITEM_SERVICE: MCP item saved successfully");
|
||||
|
||||
self.get_item(conn, item_id)
|
||||
}
|
||||
|
||||
/// Returns a reference to the internal compression service.
///
/// Borrowed accessor; callers needing an owned handle should clone on
/// their side.
///
/// # Returns
///
/// Reference to `CompressionService`.
pub fn get_compression_service(&self) -> &CompressionService {
    &self.compression_service
}
|
||||
|
||||
/// Returns a reference to the data directory path.
///
/// This is the directory under which item data files are stored, named
/// by item ID.
///
/// # Returns
///
/// Reference to `PathBuf`.
pub fn get_data_path(&self) -> &PathBuf {
    &self.data_path
}
|
||||
}
|
||||
|
||||
/// A reader that applies a filter chain to the data as it's read.
///
/// Wraps an underlying reader and applies a filter chain to the data during
/// read operations. Filtered output is buffered internally because one read
/// from the source may produce more (or fewer) bytes than the caller's
/// buffer can hold.
///
/// # Fields
///
/// * `reader` - The underlying reader providing the data source.
/// * `filter_chain` - Optional filter chain to apply; `None` means pass-through.
/// * `buffer` - Internal buffer holding filtered data not yet handed to the caller.
/// * `buffer_pos` - Read position within `buffer`; data before it was already served.
struct FilteringReader<R: Read> {
    reader: R,
    filter_chain: Option<filter_plugin::FilterChain>,
    buffer: Vec<u8>,
    buffer_pos: usize,
}
|
||||
|
||||
impl<R: Read> FilteringReader<R> {
    /// Creates a new `FilteringReader` with the given reader and filter chain.
    ///
    /// The internal buffer starts empty; it is filled lazily on the first
    /// `read` call.
    ///
    /// # Arguments
    ///
    /// * `reader` - The underlying reader.
    /// * `filter_chain` - Optional filter chain to apply; `None` passes data through.
    ///
    /// # Returns
    ///
    /// A new `FilteringReader`.
    ///
    /// # Examples
    ///
    /// ```
    /// let reader = std::io::Cursor::new(b"data");
    /// let filter_chain = parse_filter_string("head(10)")?;
    /// let filtered = FilteringReader::new(reader, Some(filter_chain));
    /// ```
    pub fn new(reader: R, filter_chain: Option<filter_plugin::FilterChain>) -> Self {
        Self {
            reader,
            filter_chain,
            buffer: Vec::new(),
            buffer_pos: 0,
        }
    }
}
|
||||
|
||||
impl<R: Read> Read for FilteringReader<R> {
|
||||
/// Reads data, applying the filter chain if present.
|
||||
///
|
||||
/// If buffered data exists, serves it first. Otherwise, reads a chunk, filters it,
|
||||
/// and serves the output. Handles EOF properly.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `buf` - Buffer to fill with filtered data.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// * `io::Result<usize>` - Number of bytes read, or I/O error.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Propagates errors from underlying reader or filter operations.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let mut filtered = FilteringReader::new(std::io::Cursor::new(b"Hello"), None);
|
||||
/// let mut buf = [0; 5];
|
||||
/// let n = filtered.read(&mut buf).unwrap();
|
||||
/// assert_eq!(n, 5);
|
||||
/// ```
|
||||
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
|
||||
// If we have data in our buffer, serve that first
|
||||
if self.buffer_pos < self.buffer.len() {
|
||||
let bytes_to_copy = std::cmp::min(buf.len(), self.buffer.len() - self.buffer_pos);
|
||||
buf[..bytes_to_copy]
|
||||
.copy_from_slice(&self.buffer[self.buffer_pos..self.buffer_pos + bytes_to_copy]);
|
||||
self.buffer_pos += bytes_to_copy;
|
||||
return Ok(bytes_to_copy);
|
||||
}
|
||||
|
||||
// Reset buffer for new data
|
||||
self.buffer.clear();
|
||||
self.buffer_pos = 0;
|
||||
|
||||
// Read from the original reader into a temporary buffer
|
||||
let mut temp_buf = vec![0; buf.len()];
|
||||
let bytes_read = self.reader.read(&mut temp_buf)?;
|
||||
|
||||
if bytes_read == 0 {
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
// Process through the filter chain if it exists
|
||||
if let Some(ref mut chain) = self.filter_chain {
|
||||
// Use a cursor to read the input data
|
||||
let mut input_cursor = std::io::Cursor::new(&temp_buf[..bytes_read]);
|
||||
// Write filtered output to our buffer
|
||||
chain.filter(&mut input_cursor, &mut self.buffer)?;
|
||||
|
||||
if !self.buffer.is_empty() {
|
||||
let bytes_to_copy = std::cmp::min(buf.len(), self.buffer.len());
|
||||
buf[..bytes_to_copy].copy_from_slice(&self.buffer[..bytes_to_copy]);
|
||||
self.buffer_pos = bytes_to_copy;
|
||||
Ok(bytes_to_copy)
|
||||
} else {
|
||||
// No data produced by filter, try reading more
|
||||
Ok(0)
|
||||
}
|
||||
} else {
|
||||
// No filter chain, just pass through
|
||||
buf[..bytes_read].copy_from_slice(&temp_buf[..bytes_read]);
|
||||
Ok(bytes_read)
|
||||
}
|
||||
}
|
||||
}
|
||||
230
src/services/meta_service.rs
Normal file
230
src/services/meta_service.rs
Normal file
@@ -0,0 +1,230 @@
|
||||
use crate::config::Settings;
|
||||
use crate::meta_plugin::{MetaPlugin, MetaPluginType};
|
||||
use crate::modes::common::settings_meta_plugin_types;
|
||||
use clap::Command;
|
||||
use log::debug;
|
||||
use rusqlite::Connection;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Stateless service that runs metadata plugins over item content.
pub struct MetaService;
|
||||
|
||||
impl MetaService {
|
||||
/// Creates a new `MetaService`. The service is stateless, so this is free.
pub fn new() -> Self {
    Self
}
|
||||
|
||||
/// Builds the configured metadata plugins for an operation.
///
/// Resolves the enabled plugin types from `settings` (via
/// `settings_meta_plugin_types`) and instantiates one plugin per type,
/// passing along any per-plugin `options` and `outputs` found in
/// `settings.meta_plugins` (matched by plugin name).
///
/// # Arguments
///
/// * `cmd` - Mutable Clap command, used while resolving plugin types.
/// * `settings` - Application settings holding per-plugin configuration.
///
/// # Returns
///
/// One boxed plugin instance per configured plugin type.
pub fn get_plugins(&self, cmd: &mut Command, settings: &Settings) -> Vec<Box<dyn MetaPlugin>> {
    debug!("META_SERVICE: get_plugins called");
    let meta_plugin_types: Vec<MetaPluginType> = settings_meta_plugin_types(cmd, settings);
    debug!(
        "META_SERVICE: Meta plugin types from settings: {:?}",
        meta_plugin_types
    );

    // Create plugins with their configuration
    let meta_plugins: Vec<Box<dyn MetaPlugin>> = meta_plugin_types
        .iter()
        .map(|meta_plugin_type| {
            debug!("META_SERVICE: Creating plugin: {:?}", meta_plugin_type);

            // Get the plugin name using strum's Display implementation
            let plugin_name = meta_plugin_type.to_string();

            // Get options and outputs from settings; both default to None
            // when this plugin has no config entry.
            let (options, outputs) = if let Some(meta_plugin_configs) = &settings.meta_plugins {
                if let Some(config) = meta_plugin_configs.iter().find(|c| c.name == plugin_name)
                {
                    // Convert options and outputs to the appropriate types
                    let options: std::collections::HashMap<String, serde_yaml::Value> = config
                        .options
                        .iter()
                        .map(|(k, v)| (k.clone(), v.clone()))
                        .collect();

                    // Output values are plain strings in the config; wrap
                    // them as YAML string values for the plugin API.
                    let outputs: std::collections::HashMap<String, serde_yaml::Value> = config
                        .outputs
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_yaml::Value::String(v.clone())))
                        .collect();

                    (Some(options), Some(outputs))
                } else {
                    (None, None)
                }
            } else {
                (None, None)
            };

            crate::meta_plugin::get_meta_plugin(meta_plugin_type.clone(), options, outputs)
        })
        .collect();

    meta_plugins
}
|
||||
|
||||
pub fn initialize_plugins(
|
||||
&self,
|
||||
plugins: &mut [Box<dyn MetaPlugin>],
|
||||
conn: &Connection,
|
||||
item_id: i64,
|
||||
) {
|
||||
// Check for duplicate output names before initializing plugins
|
||||
let mut output_names: std::collections::HashMap<String, Vec<String>> =
|
||||
std::collections::HashMap::new();
|
||||
|
||||
for plugin in plugins.iter() {
|
||||
let plugin_name = plugin.meta_type().to_string();
|
||||
// For each plugin, collect all the output names it might write to
|
||||
for (internal_name, output_config) in plugin.outputs() {
|
||||
let output_name = match output_config {
|
||||
serde_yaml::Value::String(remapped_name) => remapped_name.clone(),
|
||||
serde_yaml::Value::Bool(true) => internal_name.clone(),
|
||||
serde_yaml::Value::Bool(false) => continue, // This output is disabled
|
||||
_ => internal_name.clone(), // Default to internal name for other types
|
||||
};
|
||||
|
||||
// Only track outputs that will actually be written
|
||||
if !matches!(output_config, serde_yaml::Value::Bool(false)) {
|
||||
output_names
|
||||
.entry(output_name)
|
||||
.or_default()
|
||||
.push(plugin_name.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Print warnings for duplicate output names
|
||||
for (output_name, plugin_names) in &output_names {
|
||||
if plugin_names.len() > 1 {
|
||||
log::warn!(
|
||||
"META_SERVICE: Output name '{}' is provided by multiple plugins: {}",
|
||||
output_name,
|
||||
plugin_names.join(", ")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for meta_plugin in plugins.iter_mut() {
|
||||
let response = meta_plugin.initialize();
|
||||
self.process_plugin_response(conn, item_id, &mut **meta_plugin, response);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn process_chunk(
|
||||
&self,
|
||||
plugins: &mut [Box<dyn MetaPlugin>],
|
||||
chunk: &[u8],
|
||||
conn: &Connection,
|
||||
item_id: i64,
|
||||
) {
|
||||
for meta_plugin in plugins.iter_mut() {
|
||||
// Skip plugins that are already finalized
|
||||
if meta_plugin.is_finalized() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let response = meta_plugin.update(chunk);
|
||||
self.process_plugin_response(conn, item_id, &mut **meta_plugin, response.clone());
|
||||
|
||||
// Set finalized flag if response indicates finalization
|
||||
if response.is_finalized {
|
||||
meta_plugin.set_finalized(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finalize_plugins(
|
||||
&self,
|
||||
plugins: &mut [Box<dyn MetaPlugin>],
|
||||
conn: &Connection,
|
||||
item_id: i64,
|
||||
) {
|
||||
for meta_plugin in plugins.iter_mut() {
|
||||
// Skip plugins that are already finalized
|
||||
if meta_plugin.is_finalized() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let response = meta_plugin.finalize();
|
||||
self.process_plugin_response(conn, item_id, &mut **meta_plugin, response.clone());
|
||||
|
||||
// Set finalized flag if response indicates finalization
|
||||
if response.is_finalized {
|
||||
meta_plugin.set_finalized(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Internal helper to process a meta plugin response and store metadata.
|
||||
///
|
||||
/// Iterates over the metadata entries in the response and stores each in the database
|
||||
/// using `store_meta`. Logs warnings if storage fails.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `conn` - Database connection.
|
||||
/// * `item_id` - Item ID to associate with the metadata.
|
||||
/// * `_plugin` - Reference to the plugin (unused).
|
||||
/// * `response` - The plugin response containing metadata.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Logs warnings for individual storage failures but does not return errors.
|
||||
fn process_plugin_response(
|
||||
&self,
|
||||
conn: &Connection,
|
||||
item_id: i64,
|
||||
_plugin: &mut dyn MetaPlugin,
|
||||
response: crate::meta_plugin::MetaPluginResponse,
|
||||
) {
|
||||
for meta_data in response.metadata {
|
||||
// The metadata has already been processed by the plugin, so we can use it directly
|
||||
// Save to database
|
||||
let db_meta = crate::db::Meta {
|
||||
id: item_id,
|
||||
name: meta_data.name,
|
||||
value: meta_data.value,
|
||||
};
|
||||
if let Err(e) = crate::db::store_meta(conn, db_meta) {
|
||||
log::warn!("META_SERVICE: Failed to store metadata: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Collects initial metadata from environment variables and hostname.
|
||||
///
|
||||
/// Gathers metadata from `KEEP_META_*` environment variables and adds hostname
|
||||
/// if not already present.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A `HashMap` of initial metadata key-value pairs.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let service = MetaService::new();
|
||||
/// let initial_meta = service.collect_initial_meta();
|
||||
/// ```
|
||||
pub fn collect_initial_meta(&self) -> HashMap<String, String> {
|
||||
let mut item_meta: HashMap<String, String> = crate::modes::common::get_meta_from_env();
|
||||
|
||||
if let Ok(hostname) = gethostname::gethostname().into_string()
|
||||
&& !item_meta.contains_key("hostname")
|
||||
{
|
||||
item_meta.insert("hostname".to_string(), hostname);
|
||||
}
|
||||
item_meta
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for MetaService {
    /// Provides a default `MetaService` instance.
    ///
    /// Delegates to `new()`, so both construction paths behave identically.
    ///
    /// # Returns
    ///
    /// A new `MetaService` via `new()`.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
17
src/services/mod.rs
Normal file
17
src/services/mod.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
pub mod async_item_service;
|
||||
pub mod compression_service;
|
||||
pub mod error;
|
||||
pub mod filter_service;
|
||||
pub mod item_service;
|
||||
pub mod meta_service;
|
||||
pub mod status_service;
|
||||
pub mod types;
|
||||
|
||||
pub use async_item_service::AsyncItemService;
|
||||
pub use compression_service::CompressionService;
|
||||
pub use error::CoreError;
|
||||
pub use filter_service::{FilterService, register_filter_plugin};
|
||||
pub use item_service::ItemService;
|
||||
pub use meta_service::MetaService;
|
||||
pub use status_service::StatusService;
|
||||
pub use types::{ItemWithContent, ItemWithMeta};
|
||||
131
src/services/status_service.rs
Normal file
131
src/services/status_service.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
use crate::common::status::{StatusInfo, generate_status_info};
|
||||
use crate::compression_engine::CompressionType;
|
||||
use crate::config::Settings;
|
||||
use crate::meta_plugin::MetaPluginType;
|
||||
use crate::services::filter_service::get_available_filter_plugins;
|
||||
use clap::Command;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Service for generating system status information.
///
/// This stateless service collects and formats status data about the
/// application's configuration, storage paths, compression engines,
/// metadata plugins, and filter plugins. It provides a unified interface
/// for status reporting used by both CLI and server modes.
///
/// # Examples
///
/// ```
/// let service = StatusService::new();
/// let status = service.generate_status(&mut cmd, &settings, data_path, db_path);
/// ```
pub struct StatusService;
|
||||
|
||||
impl StatusService {
|
||||
/// Creates a new `StatusService` instance.
///
/// The service is stateless, so no initialization work is performed.
///
/// # Returns
///
/// * `StatusService` - A new instance.
///
/// # Examples
///
/// ```
/// let service = StatusService::new();
/// ```
pub fn new() -> Self {
    Self
}
|
||||
|
||||
/// Generates comprehensive status information for the application.
///
/// Collects data about paths, compression engines, available and configured
/// meta plugins, and filter plugins. Uses the provided settings to determine
/// enabled components. Handles error reporting via Clap if needed.
///
/// # Arguments
///
/// * `cmd` - Mutable reference to the Clap command for error reporting (e.g., invalid plugins).
/// * `settings` - Application settings containing configuration details like enabled plugins.
/// * `data_path` - Path to the data storage directory for item files.
/// * `db_path` - Path to the SQLite database file.
///
/// # Returns
///
/// * `StatusInfo` - A structured object containing all status details, including paths, plugins, and config.
///
/// # Errors
///
/// Exits via Clap error if invalid meta plugin types are configured in settings.
///
/// # Examples
///
/// ```
/// let status = service.generate_status(&mut cmd, &settings, data_path, db_path);
/// assert!(!status.filter_plugins.is_empty());
/// ```
pub fn generate_status(
    &self,
    cmd: &mut Command,
    settings: &Settings,
    data_path: PathBuf,
    db_path: PathBuf,
) -> StatusInfo {
    // Get meta plugins directly from config
    let meta_plugin_types: Vec<MetaPluginType> =
        crate::modes::common::settings_meta_plugin_types(cmd, settings);

    // Determine which compression type would be enabled for a save operation.
    // NOTE(review): an unrecognized name yields `None` here (via `.ok()`)
    // rather than falling back to the default — confirm that is intended.
    let enabled_compression_type = if let Some(compression_name) = &settings.compression() {
        CompressionType::from_str(compression_name).ok()
    } else {
        Some(crate::compression_engine::default_compression_type())
    };

    let mut status_info = generate_status_info(
        data_path,
        db_path,
        &meta_plugin_types,
        enabled_compression_type,
    );

    // Add detailed filter plugins information by instantiating each
    // registered plugin to query its options.
    let filter_plugins_map = get_available_filter_plugins();
    let mut filter_plugins_info = Vec::new();

    for (name, creator) in filter_plugins_map {
        let plugin = creator();
        let options = plugin.options();
        // For now, use a default description
        let description = "Filter plugin".to_string();

        filter_plugins_info.push(crate::common::status::FilterPluginInfo {
            name,
            options,
            description,
        });
    }
    status_info.filter_plugins = filter_plugins_info;

    // Add configured meta plugins information
    status_info.configured_meta_plugins = settings.meta_plugins.clone();

    status_info
}
|
||||
}
|
||||
|
||||
impl Default for StatusService {
    /// Returns the default `StatusService` instance.
    ///
    /// Delegates to `new()` for consistency.
    ///
    /// # Returns
    ///
    /// * `StatusService` - A new instance.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
55
src/services/types.rs
Normal file
55
src/services/types.rs
Normal file
@@ -0,0 +1,55 @@
|
||||
use crate::db::{Item, Meta, Tag};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Structure representing an item with its associated tags and metadata.
///
/// This is a composite type used for querying and displaying items with their relational data.
/// It combines the core Item with lists of Tags and Meta for complete item representation.
pub struct ItemWithMeta {
    /// The core item data.
    pub item: Item,
    /// Associated tags.
    pub tags: Vec<Tag>,
    /// Associated metadata entries (name/value rows for this item).
    pub meta: Vec<Meta>,
}
|
||||
|
||||
impl ItemWithMeta {
|
||||
/// Converts metadata to a HashMap for easy lookup.
|
||||
///
|
||||
/// This method transforms the vec of Meta into a simple key-value map,
|
||||
/// useful for quick access by metadata name.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// `HashMap<String, String>` - Metadata as key-value pairs, where keys are names and values are strings.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let item_with_meta = ItemWithMeta { /* ... */ };
|
||||
/// let meta_map = item_with_meta.meta_as_map();
|
||||
/// assert_eq!(meta_map.get("hostname"), Some(&"example.com".to_string()));
|
||||
/// ```
|
||||
pub fn meta_as_map(&self) -> HashMap<String, String> {
|
||||
self.meta
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|m| (m.name, m.value))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Structure representing an item with its content, tags, and metadata.
///
/// This extends ItemWithMeta by including the actual content bytes, suitable for full item retrieval
/// including binary or text data. Note: For large content, consider streaming alternatives, since
/// the whole payload is held in memory.
pub struct ItemWithContent {
    /// Item with associated metadata and tags.
    pub item_with_meta: ItemWithMeta,
    /// The content bytes (fully decompressed, in memory).
    pub content: Vec<u8>,
}
|
||||
287
src/tests.rs
287
src/tests.rs
@@ -1,287 +0,0 @@
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::fs;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
// Global test input values
|
||||
const INPUT_A: &str = "test content A";
|
||||
const INPUT_B: &str = "test content B";
|
||||
|
||||
use tempfile::tempdir;
|
||||
|
||||
// Helper function to run the keep binary with arguments.
//
// Spawns the compiled `keep` binary (resolved via CARGO_BIN_EXE_keep) with
// KEEP_DIR pointing at the test directory. When `stdin_data` is given it is
// piped to the child's stdin; otherwise stdin is null. Returns the completed
// process output (status, stdout, stderr).
fn run_keep(args: &[&str], stdin_data: Option<&str>, keep_dir: &Path) -> std::process::Output {
    let mut cmd = Command::new(env!("CARGO_BIN_EXE_keep"));
    cmd.args(args)
        .env("KEEP_DIR", keep_dir);

    if stdin_data.is_some() {
        cmd.stdin(std::process::Stdio::piped());
    } else {
        cmd.stdin(std::process::Stdio::null());
    }

    let mut child = cmd.spawn().expect("Failed to execute keep command");

    if let Some(data) = stdin_data {
        if let Some(mut stdin) = child.stdin.take() {
            // The handle is dropped right after writing, closing the child's
            // stdin so the command sees EOF and can finish.
            stdin.write_all(data.as_bytes()).expect("Failed to write to stdin");
        }
    }

    child.wait_with_output().expect("Failed to wait for command")
}
|
||||
|
||||
// Helper function to create a temporary test environment.
//
// Creates a fresh temporary directory, runs the supplied closure with its
// path, then removes the directory. Each test gets an isolated KEEP_DIR.
fn with_temp_env<F>(f: F)
where
    F: FnOnce(&Path),
{
    let dir = tempdir().expect("Failed to create temporary directory");
    let data_path = dir.path();

    // Create the data directory structure
    fs::create_dir_all(data_path).expect("Failed to create directory");

    // Run the test
    f(data_path);

    // Clean up
    dir.close().expect("Failed to remove temporary directory");
}
|
||||
|
||||
// Helper function to create test items with specific content and tags.
//
// Saves INPUT_A under tags {tag_a, tag} and INPUT_B under {tag_b, tag},
// asserting that both saves succeed. Tests relying on item IDs 1 and 2
// call this first.
fn create_test_items(data_path: &Path) {
    // Create first item with tag_a and tag
    let output = run_keep(&["tag_a", "tag"], Some(INPUT_A), data_path);
    assert!(
        output.status.success(),
        "Failed to create first test item: {}",
        String::from_utf8_lossy(&output.stderr)
    );

    // Create second item with tag_b and tag
    let output = run_keep(&["tag_b", "tag"], Some(INPUT_B), data_path);
    assert!(
        output.status.success(),
        "Failed to create second test item: {}",
        String::from_utf8_lossy(&output.stderr)
    );
}
|
||||
|
||||
#[test]
// Saving an item from stdin should succeed and the tag should appear in
// the subsequent --list output.
fn test_save_item() {
    with_temp_env(|data_path| {
        // Test content and tags
        let input = "test content";
        let tag = "test_tag";

        // Save an item
        let output = run_keep(&[tag], Some(input), data_path);
        assert!(
            output.status.success(),
            "Failed to save item: {}",
            String::from_utf8_lossy(&output.stderr)
        );

        // Verify item was saved by listing
        let output = run_keep(&["--list"], None, data_path);
        assert!(
            output.status.success(),
            "Failed to list items: {}",
            String::from_utf8_lossy(&output.stderr)
        );

        let output_str = String::from_utf8_lossy(&output.stdout);
        assert!(
            output_str.contains(tag),
            "List output does not contain expected tag. Output: {}",
            output_str
        );
    });
}
|
||||
|
||||
#[test]
// Retrieving an item must work both by numeric ID and by tag, and the
// output must contain the original content.
fn test_get_item() {
    with_temp_env(|data_path| {
        // Create test items
        create_test_items(data_path);

        // Get item by ID
        let output = run_keep(&["--get", "1"], None, data_path);
        assert!(
            output.status.success(),
            "Failed to get item by ID: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output_str = String::from_utf8_lossy(&output.stdout);
        assert!(
            output_str.contains(INPUT_A),
            "Get output does not contain expected content. Output: {}",
            output_str
        );

        // Get item by tag
        let output = run_keep(&["--get", "tag_a"], None, data_path);
        assert!(
            output.status.success(),
            "Failed to get item by tag: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let output_str = String::from_utf8_lossy(&output.stdout);
        assert!(
            output_str.contains(INPUT_A),
            "Get by tag output does not contain expected content. Output: {}",
            output_str
        );
    });
}
|
||||
|
||||
#[test]
// Listing must succeed both without a filter and when narrowed to a tag.
fn test_list_items() {
    with_temp_env(|data_path| {
        // Create test items
        create_test_items(data_path);

        // List all items
        let output = run_keep(&["--list"], None, data_path);
        assert!(
            output.status.success(),
            "Failed to list items: {}",
            String::from_utf8_lossy(&output.stderr)
        );

        // List items with specific tag
        let output = run_keep(&["--list", "tag_a"], None, data_path);
        assert!(
            output.status.success(),
            "Failed to list items by tag: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    });
}
|
||||
|
||||
#[test]
fn test_delete_item() {
    with_temp_env(|data_path| {
        // Seed the store with the fixture items.
        create_test_items(data_path);

        // --delete only accepts numeric IDs, so a tag argument is rejected.
        let by_tag = run_keep(&["--delete", "tag"], None, data_path);
        assert!(
            !by_tag.status.success(),
            "Delete with tag should have failed but succeeded"
        );

        // Deleting an existing item by its ID succeeds.
        let by_id = run_keep(&["--delete", "1"], None, data_path);
        assert!(
            by_id.status.success(),
            "Failed to delete item by ID: {}",
            String::from_utf8_lossy(&by_id.stderr)
        );

        // Deleting an ID that does not exist is a silent no-op, not an error.
        let missing = run_keep(&["--delete", "9999"], None, data_path);
        assert!(
            missing.status.success(),
            "Delete non-existent item should succeed: {}",
            String::from_utf8_lossy(&missing.stderr)
        );
    });
}
|
||||
|
||||
#[test]
fn test_diff_items() {
    with_temp_env(|data_path| {
        // Seed the store with the fixture items.
        create_test_items(data_path);

        // Two valid numeric IDs can be diffed.
        let ok = run_keep(&["--diff", "1", "2"], None, data_path);
        assert!(
            ok.status.success(),
            "Failed to diff items: {}",
            String::from_utf8_lossy(&ok.stderr)
        );

        // --diff only accepts numeric IDs, so tags are rejected.
        let tags = run_keep(&["--diff", "tag_a", "tag_b"], None, data_path);
        assert!(
            !tags.status.success(),
            "Diff with tags should have failed but succeeded"
        );

        // A missing ID on either side is an error.
        let missing = run_keep(&["--diff", "9999", "1"], None, data_path);
        assert!(
            !missing.status.success(),
            "Diff with non-existent item should have failed but succeeded"
        );
    });
}
|
||||
|
||||
#[test]
fn test_info_item() {
    with_temp_env(|data_path| {
        // Seed the store with the fixture items.
        create_test_items(data_path);

        // Info for an explicit numeric ID.
        let by_id = run_keep(&["--info", "1"], None, data_path);
        assert!(
            by_id.status.success(),
            "Failed to get item info: {}",
            String::from_utf8_lossy(&by_id.stderr)
        );

        // An empty IDS_OR_TAGS argument selects the most recent item.
        let last = run_keep(&["--info", ""], None, data_path);
        assert!(
            last.status.success(),
            "Failed to get last item info: {}",
            String::from_utf8_lossy(&last.stderr)
        );
    });
}
|
||||
|
||||
#[test]
fn test_update_item() {
    with_temp_env(|data_path| {
        // Seed the store with the fixture items.
        create_test_items(data_path);

        // Replace item 1's tags with a fresh tag.
        let output = run_keep(&["--update", "1", "new_tag"], None, data_path);
        assert!(
            output.status.success(),
            "Failed to update item: {}",
            String::from_utf8_lossy(&output.stderr)
        );

        // Verify the update by listing. The listing must succeed AND
        // actually show the new tag — previously only the exit status
        // was checked, so a no-op update would have passed this test.
        let output = run_keep(&["--list"], None, data_path);
        assert!(
            output.status.success(),
            "Failed to list items after update: {}",
            String::from_utf8_lossy(&output.stderr)
        );
        let listing = String::from_utf8_lossy(&output.stdout);
        assert!(
            listing.contains("new_tag"),
            "List output does not contain updated tag. Output: {}",
            listing
        );
    });
}
|
||||
|
||||
#[test]
fn test_status() {
    with_temp_env(|data_path| {
        // --status on a fresh data directory must exit cleanly.
        let out = run_keep(&["--status"], None, data_path);
        assert!(
            out.status.success(),
            "Failed to get status: {}",
            String::from_utf8_lossy(&out.stderr)
        );
    });
}
|
||||
}
|
||||
40
src/tests/common/is_binary_tests.rs
Normal file
40
src/tests/common/is_binary_tests.rs
Normal file
@@ -0,0 +1,40 @@
|
||||
#[cfg(test)]
mod tests {
    use crate::common::is_binary::is_binary;

    #[test]
    fn test_is_binary_text() {
        // Ordinary printable text (including whitespace) must not be
        // flagged as binary.
        let sample = b"Hello, World! This is plain text.\nWith newlines and spaces.";
        assert!(!is_binary(sample));
    }

    #[test]
    fn test_is_binary_binary() {
        // A run of low control bytes is clearly binary.
        let sample = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09";
        assert!(is_binary(sample));
    }

    #[test]
    fn test_is_binary_png_signature() {
        // The 8-byte PNG magic number is binary content.
        let sample = b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A";
        assert!(is_binary(sample));
    }

    #[test]
    fn test_is_binary_empty() {
        // Zero-length input is treated as text.
        let sample = b"";
        assert!(!is_binary(sample));
    }
}
|
||||
8
src/tests/common/mod.rs
Normal file
8
src/tests/common/mod.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
// Test modules for `crate::common`, plus shared helpers used by tests
// across the crate (see `test_helpers`). All are compiled only for tests.

#[cfg(test)]
pub mod is_binary_tests;
#[cfg(test)]
pub mod status_tests;
#[cfg(test)]
pub mod test_helpers;
|
||||
11
src/tests/common/status_tests.rs
Normal file
11
src/tests/common/status_tests.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
#[cfg(test)]
mod tests {
    // TODO: Add tests for common status functionality once implemented.
    // This would test functions related to status checking in the common module.

    /// Placeholder so the module participates in the test run; replace
    /// with real assertions once status functionality lands.
    #[test]
    fn test_status_placeholder() {
        // Intentionally empty: the previous `assert!(true)` was a no-op
        // and trips clippy::assertions_on_constants.
    }
}
|
||||
108
src/tests/common/test_helpers.rs
Normal file
108
src/tests/common/test_helpers.rs
Normal file
@@ -0,0 +1,108 @@
|
||||
//! Common test utilities and helper functions to reduce duplication in tests
|
||||
|
||||
use crate::db;
use rusqlite::Connection;
use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf};
use tempfile::TempDir;
|
||||
|
||||
/// Create a temporary directory for testing.
///
/// The directory and everything in it are removed when the returned
/// [`TempDir`] guard is dropped, so keep the guard alive for as long
/// as the test needs the directory.
///
/// # Panics
/// Panics if the temporary directory cannot be created.
pub fn create_temp_dir() -> TempDir {
    TempDir::new().expect("Failed to create temporary directory")
}
|
||||
|
||||
/// Create a temporary file with the given content
|
||||
pub fn create_temp_file_with_content(dir: &TempDir, filename: &str, content: &str) -> PathBuf {
|
||||
let file_path = dir.path().join(filename);
|
||||
let mut file = File::create(&file_path).expect("Failed to create test file");
|
||||
write!(file, "{}", content).expect("Failed to write to test file");
|
||||
file_path
|
||||
}
|
||||
|
||||
/// Create an empty temporary file
|
||||
pub fn create_empty_temp_file(dir: &TempDir, filename: &str) -> PathBuf {
|
||||
let file_path = dir.path().join(filename);
|
||||
File::create(&file_path).expect("Failed to create empty test file");
|
||||
file_path
|
||||
}
|
||||
|
||||
/// Helper to test basic temporary directory setup
|
||||
pub fn test_temp_dir_setup() {
|
||||
let temp_dir = create_temp_dir();
|
||||
assert!(temp_dir.path().exists());
|
||||
}
|
||||
|
||||
/// Helper to test file creation and verification
|
||||
pub fn test_file_creation(dir: &TempDir, filename: &str, content: &str) -> PathBuf {
|
||||
let file_path = create_temp_file_with_content(dir, filename, content);
|
||||
assert!(file_path.exists());
|
||||
|
||||
let metadata = std::fs::metadata(&file_path).expect("Failed to get file metadata");
|
||||
assert!(metadata.len() > 0);
|
||||
|
||||
file_path
|
||||
}
|
||||
|
||||
/// Create a temporary database for testing
|
||||
pub fn create_temp_db() -> (TempDir, Connection, PathBuf) {
|
||||
let temp_dir = create_temp_dir();
|
||||
let db_path = temp_dir.path().join("test.db");
|
||||
let conn = db::open(db_path.clone()).expect("Failed to open database");
|
||||
(temp_dir, conn, db_path)
|
||||
}
|
||||
|
||||
/// Create a test item in the database
|
||||
pub fn create_test_item(conn: &Connection) -> i64 {
|
||||
let item = crate::db::Item {
|
||||
id: None,
|
||||
ts: chrono::Utc::now(),
|
||||
size: Some(100),
|
||||
compression: crate::compression_engine::CompressionType::None.to_string(),
|
||||
};
|
||||
db::insert_item(conn, item).expect("Failed to insert item")
|
||||
}
|
||||
|
||||
/// Test compression and decompression with an engine
|
||||
pub fn test_compression_engine(
|
||||
engine: &dyn crate::compression_engine::CompressionEngine,
|
||||
test_data: &[u8],
|
||||
) {
|
||||
let dir = create_temp_dir();
|
||||
let file_path = dir.path().join("test_compression.dat");
|
||||
|
||||
// Test compression
|
||||
{
|
||||
let mut writer = engine
|
||||
.create(file_path.clone())
|
||||
.expect("Failed to create writer");
|
||||
writer.write_all(test_data).expect("Failed to write data");
|
||||
}
|
||||
|
||||
// Test decompression
|
||||
let mut reader = engine.open(file_path).expect("Failed to open reader");
|
||||
let mut decompressed = Vec::new();
|
||||
std::io::copy(&mut reader, &mut decompressed).expect("Failed to read data");
|
||||
|
||||
assert_eq!(test_data, decompressed.as_slice());
|
||||
}
|
||||
|
||||
/// Get the size of the file at `file_path`, in bytes.
///
/// Takes `&Path` rather than `&PathBuf` (clippy::ptr_arg); existing
/// callers passing `&PathBuf` continue to work via deref coercion.
///
/// # Panics
/// Panics if the file's metadata cannot be read (e.g. it does not exist).
pub fn get_file_size(file_path: &Path) -> u64 {
    std::fs::metadata(file_path)
        .expect("Failed to get file metadata")
        .len()
}
|
||||
|
||||
/// Assert that a file exists at `file_path`.
///
/// Takes `&Path` rather than `&PathBuf` (clippy::ptr_arg); existing
/// callers passing `&PathBuf` continue to work via deref coercion.
pub fn assert_file_exists(file_path: &Path) {
    assert!(file_path.exists(), "File {:?} does not exist", file_path);
}
|
||||
|
||||
/// Assert that no file exists at `file_path`.
///
/// Takes `&Path` rather than `&PathBuf` (clippy::ptr_arg); existing
/// callers passing `&PathBuf` continue to work via deref coercion.
pub fn assert_file_not_exists(file_path: &Path) {
    assert!(
        !file_path.exists(),
        "File {:?} should not exist but it does",
        file_path
    );
}
|
||||
21
src/tests/compression/gzip_tests.rs
Normal file
21
src/tests/compression/gzip_tests.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
#[cfg(test)]
mod tests {
    use crate::compression_engine::CompressionEngine;
    use crate::compression_engine::gzip::CompressionEngineGZip;
    use crate::tests::common::test_helpers::test_compression_engine;

    #[test]
    fn test_compression_engine_gzip() {
        // A gzip round-trip over a small payload must be lossless.
        let engine = CompressionEngineGZip {};
        assert!(engine.is_supported());
        test_compression_engine(&engine, b"test compression data");
    }

    #[test]
    fn test_compression_engine_gzip_empty_data() {
        // The empty payload is the degenerate round-trip case.
        test_compression_engine(&CompressionEngineGZip {}, b"");
    }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user