Compare commits
673 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2e6c9795ec | ||
|
|
c6b667f8b3 | ||
|
|
ad4cddb4f3 | ||
|
|
ddba83b993 | ||
|
|
91c33b0431 | ||
|
|
d1df40633a | ||
|
|
7f9fc484e8 | ||
|
|
ecf564648e | ||
|
|
150e3190bc | ||
|
|
63947346e9 | ||
|
|
86816ce357 | ||
|
|
0d34831df4 | ||
|
|
c35da67401 | ||
|
|
fb47022380 | ||
|
|
46c5128418 | ||
|
|
4a5bfee616 | ||
|
|
f8314e0f8e | ||
|
|
9624af4e67 | ||
|
|
5bec4768e7 | ||
|
|
3851b0943a | ||
|
|
cc1f640a50 | ||
|
|
ec0a2dc053 | ||
|
|
a6166a1ad7 | ||
|
|
41e3d1f490 | ||
|
|
2cbecaa552 | ||
|
|
8d543dcc7d | ||
|
|
18b1afe34f | ||
|
|
0f86bbfad8 | ||
|
|
0d021a800a | ||
|
|
038304384a | ||
|
|
2c09ad6b91 | ||
|
|
0bd09d03c1 | ||
|
|
faa0e6c289 | ||
|
|
c28d800d7f | ||
|
|
4fd772ecd8 | ||
|
|
5520a84062 | ||
|
|
66c7123f7c | ||
|
|
bacf4154fd | ||
|
|
61790d2261 | ||
|
|
899111a310 | ||
|
|
3bfa35e1c7 | ||
|
|
ebefcb7fc1 | ||
|
|
ce11685371 | ||
|
|
9edb848947 | ||
|
|
f326096fad | ||
|
|
46f0b23f4f | ||
|
|
1c1d3bd619 | ||
|
|
d894f92d5e | ||
|
|
6c44191fe4 | ||
|
|
0deb78a9af | ||
|
|
9c15f4ba88 | ||
|
|
4ba27ec1d6 | ||
|
|
c8dd80530a | ||
|
|
eda5ea7d1a | ||
|
|
77a916e1a8 | ||
|
|
7ba2a4b27b | ||
|
|
d33f69720a | ||
|
|
59c880dc36 | ||
|
|
e5c355e8f9 | ||
|
|
d36fadf3ca | ||
|
|
b618cbdf7c | ||
|
|
15ec7173aa | ||
|
|
4166e92754 | ||
|
|
85166b6e8b | ||
|
|
5278599675 | ||
|
|
18cac8ba5d | ||
|
|
dfccbceea6 | ||
|
|
fc4b651e46 | ||
|
|
fb89922ecf | ||
|
|
8ab23c8cd9 | ||
|
|
787a2c5071 | ||
|
|
da76a20345 | ||
|
|
9688dbdb36 | ||
|
|
6fa16e1a5e | ||
|
|
71a2e3cfca | ||
|
|
e9c0f7e200 | ||
|
|
25154a4331 | ||
|
|
22c152f600 | ||
|
|
3eab61cbc3 | ||
|
|
a029c1d0db | ||
|
|
706757d215 | ||
|
|
9054c233f4 | ||
|
|
efb0748fc9 | ||
|
|
751b0ef716 | ||
|
|
716450b97e | ||
|
|
2c289a4d8f | ||
|
|
a4ad4c033f | ||
|
|
511bca9d66 | ||
|
|
ac3fb03b2d | ||
|
|
282087d0f3 | ||
|
|
781282599c | ||
|
|
d611ab0ee2 | ||
|
|
411cbdffee | ||
|
|
cfd19e02a7 | ||
|
|
717eeb3903 | ||
|
|
a394fb8757 | ||
|
|
2125a7ffdb | ||
|
|
00c0a6ec60 | ||
|
|
090bcf89ac | ||
|
|
4a768dec48 | ||
|
|
c8d72ddd3b | ||
|
|
5cf618695f | ||
|
|
8a1f497265 | ||
|
|
acdf20f800 | ||
|
|
dbd1003002 | ||
|
|
48db3d3fcc | ||
|
|
41ccd14f25 | ||
|
|
60800df798 | ||
|
|
9c36f2cbc5 | ||
|
|
0b4fff907a | ||
|
|
442f09d0fe | ||
|
|
50af28b2aa | ||
|
|
28ad74a68e | ||
|
|
13cdbae38f | ||
|
|
55c77df5ae | ||
|
|
9b1d2fd985 | ||
|
|
91b7ea0367 | ||
|
|
96d3926d09 | ||
|
|
c709b5a7eb | ||
|
|
df82914005 | ||
|
|
b1bdc38283 | ||
|
|
beb1215329 | ||
|
|
51784388b9 | ||
|
|
dbbbd53a4d | ||
|
|
f9d992c969 | ||
|
|
29a4d61e90 | ||
|
|
2667cdb26c | ||
|
|
a1669a5104 | ||
|
|
059f1bd63d | ||
|
|
82ae5e442c | ||
|
|
b10114cd7c | ||
|
|
33f730aac4 | ||
|
|
92fdfdb05c | ||
|
|
fbaf3f3623 | ||
|
|
5f400bc513 | ||
|
|
0fc59645fc | ||
|
|
e2dee272b8 | ||
|
|
364cf362f4 | ||
|
|
8394a263c4 | ||
|
|
0e9aa26cfc | ||
|
|
6a23d63266 | ||
|
|
af2fc15964 | ||
|
|
5919037a4a | ||
|
|
a761dab229 | ||
|
|
fa656e1f56 | ||
|
|
77e141e84a | ||
|
|
2439965fa8 | ||
|
|
f66afbee90 | ||
|
|
5a89d23a67 | ||
|
|
07c8dad1c3 | ||
|
|
beb8b18e98 | ||
|
|
887bb5d7cc | ||
|
|
4a9542d970 | ||
|
|
c049d9d5ff | ||
|
|
c2cc4389a0 | ||
|
|
12b5011266 | ||
|
|
6e3cad454c | ||
|
|
8251bd028c | ||
|
|
da87d452c2 | ||
|
|
9bca0dfb3c | ||
|
|
57904c4a97 | ||
|
|
4e74d851e9 | ||
|
|
e5c1f69b02 | ||
|
|
9d390d064c | ||
|
|
4994d7892c | ||
|
|
1ea06e3c42 | ||
|
|
a4b7a6dfc7 | ||
|
|
7fe1cce606 | ||
|
|
7e5abe32e0 | ||
|
|
47caf7c142 | ||
|
|
cf4d777344 | ||
|
|
255927c346 | ||
|
|
e8c5fc79a6 | ||
|
|
b309b24d0b | ||
|
|
13f4cca9d5 | ||
|
|
b3c0273e0c | ||
|
|
1df7fdf703 | ||
|
|
cbf38309e2 | ||
|
|
2ec7257dd7 | ||
|
|
531aac6923 | ||
|
|
59b4604c77 | ||
|
|
52aa269af9 | ||
|
|
8a03d9c498 | ||
|
|
a36fc7ecfd | ||
|
|
7b0c269bce | ||
|
|
c10bf9b357 | ||
|
|
0606642953 | ||
|
|
d1b2cae201 | ||
|
|
097e567122 | ||
|
|
d22e1d6a24 | ||
|
|
2827069bd9 | ||
|
|
614e3bd2a0 | ||
|
|
ff756a01d2 | ||
|
|
db14606dbe | ||
|
|
de0a69ede5 | ||
|
|
5bf5065d9a | ||
|
|
0235dadbf7 | ||
|
|
203a15b447 | ||
|
|
fe4dfe2194 | ||
|
|
c2eb93abe0 | ||
|
|
d32b834ae7 | ||
|
|
cecf45a698 | ||
|
|
69cd348cc3 | ||
|
|
868025ffa3 | ||
|
|
60126a8cc5 | ||
|
|
8cfba49559 | ||
|
|
168f053c6f | ||
|
|
897e1d4539 | ||
|
|
5ef6a0f4ea | ||
|
|
eb80e32812 | ||
|
|
620dadafe4 | ||
|
|
e76fa878d2 | ||
|
|
376b421eb9 | ||
|
|
e1643aca80 | ||
|
|
4e97c0c5c9 | ||
|
|
2d51b122af | ||
|
|
05b88a3c73 | ||
|
|
3c087d49e9 | ||
|
|
d81fcccf10 | ||
|
|
ee3a7bbbfc | ||
|
|
82d9e2fb16 | ||
|
|
6ab39d6f70 | ||
|
|
4aa413e697 | ||
|
|
04b3fc54b0 | ||
|
|
e4c5a4e886 | ||
|
|
a0ee7a59eb | ||
|
|
b4a05160df | ||
|
|
1a437b3961 | ||
|
|
bda8555190 | ||
|
|
10ca38f91d | ||
|
|
a468faad20 | ||
|
|
7a20be4aff | ||
|
|
06b974c8a4 | ||
|
|
7284d9fcd8 | ||
|
|
515394049a | ||
|
|
35c8b4f535 | ||
|
|
1a325a66b4 | ||
|
|
7d82116fb9 | ||
|
|
8a7bd4f21b | ||
|
|
2e5a2ef12d | ||
|
|
89aceda65a | ||
|
|
39fd83aa16 | ||
|
|
a23d811fe8 | ||
|
|
a238779724 | ||
|
|
3a848bc037 | ||
|
|
0528ecb454 | ||
|
|
141835593c | ||
|
|
3d06200368 | ||
|
|
729bef9a77 | ||
|
|
94f33bd642 | ||
|
|
7e010cdbca | ||
|
|
8887bcd941 | ||
|
|
56aeeee04c | ||
|
|
98eb3c7287 | ||
|
|
6819c1989b | ||
|
|
7e01dd3e97 | ||
|
|
ea4f2c3de8 | ||
|
|
b2f63b8761 | ||
|
|
65865101ce | ||
|
|
c3637afe69 | ||
|
|
ab543ddf0c | ||
|
|
80595e76e7 | ||
|
|
d49e68737a | ||
|
|
712e15ba80 | ||
|
|
986160e667 | ||
|
|
1ae4e23db1 | ||
|
|
bad646141c | ||
|
|
7911235b68 | ||
|
|
12dee4d14d | ||
|
|
cba841beb8 | ||
|
|
4e3ebf7078 | ||
|
|
1c34969f64 | ||
|
|
dc26cabacd | ||
|
|
a7bffcd471 | ||
|
|
6ae56ac2cc | ||
|
|
03c087020c | ||
|
|
857a1ab9c4 | ||
|
|
64d9530e13 | ||
|
|
5dac1efc30 | ||
|
|
18bc74bc96 | ||
|
|
f64efc63f8 | ||
|
|
e84b897991 | ||
|
|
519647ef93 | ||
|
|
f694fe00e4 | ||
|
|
0b951f27b6 | ||
|
|
8aa082c9df | ||
|
|
f2c5d47bd8 | ||
|
|
ac7642cc15 | ||
|
|
8f34865dab | ||
|
|
c762d12a40 | ||
|
|
fe1e71dc07 | ||
|
|
85b0350ed4 | ||
|
|
a980491455 | ||
|
|
5798c0ccaa | ||
|
|
742f49ca1f | ||
|
|
5560fc805b | ||
|
|
9d4f8a4e8c | ||
|
|
b4d25d6285 | ||
|
|
a504a376bd | ||
|
|
f61ea6e90a | ||
|
|
b2651df36f | ||
|
|
b56c086841 | ||
|
|
0b92fee42e | ||
|
|
4343478c7b | ||
|
|
94649cbfc7 | ||
|
|
fb83f84d84 | ||
|
|
e099a5a32e | ||
|
|
84c2632d40 | ||
|
|
3417ee25eb | ||
|
|
6ada30102c | ||
|
|
ac86ca7266 | ||
|
|
bb1d3edf71 | ||
|
|
97b9253017 | ||
|
|
971c2180c9 | ||
|
|
f96dc6991e | ||
|
|
6855493b2f | ||
|
|
ff0d1f7c42 | ||
|
|
3ae5824761 | ||
|
|
702e865715 | ||
|
|
6bcf64c83f | ||
|
|
18b270c9d0 | ||
|
|
783376acb0 | ||
|
|
81dab470d2 | ||
|
|
a12f0feb66 | ||
|
|
d3c99d9c1c | ||
|
|
3eb3586c0f | ||
|
|
fdde16cf56 | ||
|
|
b8bc5596fd | ||
|
|
47842a79c7 | ||
|
|
391d5bc386 | ||
|
|
ba8561e357 | ||
|
|
6aa1170cef | ||
|
|
6d4363e685 | ||
|
|
6b02b1e1e8 | ||
|
|
df3e68fbaf | ||
|
|
58a5550989 | ||
|
|
ccc9e44ace | ||
|
|
f225c5cf9a | ||
|
|
5c62c7992c | ||
|
|
70b8f09ccb | ||
|
|
abfeafa026 | ||
|
|
aa029b005f | ||
|
|
6cc55e8f36 | ||
|
|
b753d2ca1e | ||
|
|
1e50329c9e | ||
|
|
4942811694 | ||
|
|
59e37e0ccb | ||
|
|
20aa86d8a9 | ||
|
|
64c5ab7042 | ||
|
|
d210f5171a | ||
|
|
c7eee0f14d | ||
|
|
221753b62e | ||
|
|
d213e4d37f | ||
|
|
f8695f21d3 | ||
|
|
4ac1030289 | ||
|
|
93c7117319 | ||
|
|
974afd92ce | ||
|
|
dd1d15f1a4 | ||
|
|
be847baaed | ||
|
|
2b819e6751 | ||
|
|
66247cc005 | ||
|
|
eafd38d3f2 | ||
|
|
c4e590e7a0 | ||
|
|
b92a594114 | ||
|
|
9dfb16f6b8 | ||
|
|
4b74866d85 | ||
|
|
f532c85247 | ||
|
|
b1cc00c1bc | ||
|
|
5696aa49d5 | ||
|
|
e12dc936fd | ||
|
|
6d39a7fb75 | ||
|
|
c87c312349 | ||
|
|
e9c1886cdd | ||
|
|
13e4b1a781 | ||
|
|
3766fb14ef | ||
|
|
29ee50e38b | ||
|
|
d1ab69dc31 | ||
|
|
e3c4a54193 | ||
|
|
2abbd2e3cf | ||
|
|
f9387a5851 | ||
|
|
7a9fb74b54 | ||
|
|
d754f3dd4c | ||
|
|
f54fc9e990 | ||
|
|
8952095da5 | ||
|
|
597240d501 | ||
|
|
7377906d02 | ||
|
|
ce6da1bce3 | ||
|
|
1bf8ff73f8 | ||
|
|
564aaaf3df | ||
|
|
64ba69b2d0 | ||
|
|
ce5ada42af | ||
|
|
1ce5973713 | ||
|
|
b035b53092 | ||
|
|
7d0e02358c | ||
|
|
374ff0aeb5 | ||
|
|
947a43111e | ||
|
|
9970911249 | ||
|
|
5fed81c27b | ||
|
|
dce4f1a5ae | ||
|
|
7e1fc32a1c | ||
|
|
a69f14f504 | ||
|
|
931069458d | ||
|
|
a5259baab0 | ||
|
|
8aaa27350d | ||
|
|
6db6eb70da | ||
|
|
ac74d2b7c2 | ||
|
|
2b316aeae9 | ||
|
|
aff96a45c6 | ||
|
|
9ee246440f | ||
|
|
e2f524ce7a | ||
|
|
a58b054292 | ||
|
|
ea9e5be1fc | ||
|
|
760ea4727c | ||
|
|
f57f2e53a0 | ||
|
|
136a393a17 | ||
|
|
8bbaab78b7 | ||
|
|
067cd59637 | ||
|
|
ce6ac7bf53 | ||
|
|
99271c4477 | ||
|
|
156142ed58 | ||
|
|
4b5516c0eb | ||
|
|
c3d8d2d240 | ||
|
|
c29cf70025 | ||
|
|
6ebce55be3 | ||
|
|
01c4a85bc0 | ||
|
|
12d4206d84 | ||
|
|
946de18bea | ||
|
|
904eb3538c | ||
|
|
c851ca9328 | ||
|
|
0ac415ad83 | ||
|
|
b3ba34d980 | ||
|
|
52740271d9 | ||
|
|
c2e444249a | ||
|
|
97310b091e | ||
|
|
4dda9cc3a1 | ||
|
|
a0538b57e2 | ||
|
|
d7f394eeb6 | ||
|
|
1bc4571d42 | ||
|
|
22e878502a | ||
|
|
03c1b6e30c | ||
|
|
374a434d98 | ||
|
|
f1e85ff0e9 | ||
|
|
6b010f76ea | ||
|
|
0c3e9f7824 | ||
|
|
ccca578622 | ||
|
|
56f7c18550 | ||
|
|
d438f71bbb | ||
|
|
ca5df24b6d | ||
|
|
4a6c2d106f | ||
|
|
cd25a9568b | ||
|
|
f78a787adb | ||
|
|
dc520fa77c | ||
|
|
8f06d4dd9d | ||
|
|
a7047183e1 | ||
|
|
c0b145da24 | ||
|
|
52e7fd6f72 | ||
|
|
4bbe22b1c7 | ||
|
|
4747ffc08b | ||
|
|
9d07131fd6 | ||
|
|
721126d3db | ||
|
|
2b65f5e3dc | ||
|
|
57f10cf387 | ||
|
|
f60c8a173b | ||
|
|
857cd690be | ||
|
|
a407b60152 | ||
|
|
2c3c55adc0 | ||
|
|
f586b4da17 | ||
|
|
0b7eb41049 | ||
|
|
bd19c4e2bd | ||
|
|
e8a73087d6 | ||
|
|
dde4fd82f4 | ||
|
|
0420c393f3 | ||
|
|
c88dac6437 | ||
|
|
cd450f55e2 | ||
|
|
190ee7f9fb | ||
|
|
fd057300cc | ||
|
|
56791089c1 | ||
|
|
e91cb32ca3 | ||
|
|
9ab20df8d2 | ||
|
|
050350501c | ||
|
|
d078acdf73 | ||
|
|
b786a688b5 | ||
|
|
6b7fe40dd2 | ||
|
|
6f6c422246 | ||
|
|
d371ff4f60 | ||
|
|
d1a8348912 | ||
|
|
be956d3cb6 | ||
|
|
ba5beb81b7 | ||
|
|
106bbe5244 | ||
|
|
f39d0e7ba2 | ||
|
|
de7a1fd8ff | ||
|
|
1ac2b25876 | ||
|
|
9e014d1371 | ||
|
|
93b274a113 | ||
|
|
474c7ae873 | ||
|
|
31690d4cad | ||
|
|
bbfc7e7e49 | ||
|
|
1c0aa55e7a | ||
|
|
29778ca19e | ||
|
|
9e87318cc5 | ||
|
|
c645be6b70 | ||
|
|
57fc5ac088 | ||
|
|
924774f52a | ||
|
|
446a7a0844 | ||
|
|
5cfeed76d0 | ||
|
|
de419319d8 | ||
|
|
7a3d36899b | ||
|
|
f5dbb363f4 | ||
|
|
2bbc59a212 | ||
|
|
3403d76aae | ||
|
|
58399cedb6 | ||
|
|
9bca7e9e11 | ||
|
|
3a61430e44 | ||
|
|
7d8c783a7d | ||
|
|
a2e996b550 | ||
|
|
cfc1c31050 | ||
|
|
45106bf6f9 | ||
|
|
6e3cfe491b | ||
|
|
12f2158afd | ||
|
|
6d78773c55 | ||
|
|
43a62d4eb6 | ||
|
|
cc08dfda96 | ||
|
|
622e33588e | ||
|
|
67980b58a0 | ||
|
|
027e444955 | ||
|
|
d838750389 | ||
|
|
71d8bd5266 | ||
|
|
ec4ae24bbd | ||
|
|
1128149359 | ||
|
|
bdfc6634ec | ||
|
|
ca4d19667b | ||
|
|
c71aa7baa7 | ||
|
|
fd80ccd2c5 | ||
|
|
9dc0b24399 | ||
|
|
747954e6fb | ||
|
|
274f4f227e | ||
|
|
92197d8d49 | ||
|
|
aee06920eb | ||
|
|
5111b17d3c | ||
|
|
2849d8f45d | ||
|
|
bac60d9bd4 | ||
|
|
9c797162f4 | ||
|
|
09d184e2f8 | ||
|
|
7bca618906 | ||
|
|
67607103e9 | ||
|
|
73c9956fe4 | ||
|
|
b42f2ffe33 | ||
|
|
30a3f185ef | ||
|
|
4f1b41227f | ||
|
|
83b9d13ec9 | ||
|
|
cee7896c37 | ||
|
|
0377009d2b | ||
|
|
b472f3644e | ||
|
|
5d8ea837c8 | ||
|
|
82de6bc849 | ||
|
|
cb4bc68c48 | ||
|
|
3ce6b38247 | ||
|
|
716c0fe979 | ||
|
|
c993790b7a | ||
|
|
aa32286531 | ||
|
|
6f94abde00 | ||
|
|
fa19538c9d | ||
|
|
84c858b878 | ||
|
|
865de142d4 | ||
|
|
9118162553 | ||
|
|
f4fc6ee9b4 | ||
|
|
108c38d57b | ||
|
|
a1d73eb830 | ||
|
|
997906a610 | ||
|
|
b6e5d120d3 | ||
|
|
d469d0b435 | ||
|
|
e9f823e000 | ||
|
|
d7fb76ba74 | ||
|
|
b7dde1a0d9 | ||
|
|
15095d8c23 | ||
|
|
dfbebc7606 | ||
|
|
895309d93d | ||
|
|
bcf50e821a | ||
|
|
30195800dd | ||
|
|
6532b0f149 | ||
|
|
5e108e4057 | ||
|
|
c2b2f4d222 | ||
|
|
bc4329ad21 | ||
|
|
aec6d1b2f6 | ||
|
|
2baf119299 | ||
|
|
6fe4c5a2ed | ||
|
|
4abc8e41d8 | ||
|
|
af694f1ce9 | ||
|
|
7c3a5fcb83 | ||
|
|
57f64b18c6 | ||
|
|
4cccc7c2f8 | ||
|
|
903a2d6a6e | ||
|
|
34c674487a | ||
|
|
d15a8c5af3 | ||
|
|
3e0dec9383 | ||
|
|
8b810aad81 | ||
|
|
e676bcb4f4 | ||
|
|
a7aed77764 | ||
|
|
88875c0257 | ||
|
|
f711a0c91a | ||
|
|
d8a076cc6e | ||
|
|
c900831ee9 | ||
|
|
76a30c7ef4 | ||
|
|
ae5d0b1d81 | ||
|
|
cd5e87be34 | ||
|
|
3e967f58d2 | ||
|
|
1ea005ba7e | ||
|
|
092772ba90 | ||
|
|
b959854a76 | ||
|
|
8ccb1ebe4f | ||
|
|
91b3be6467 | ||
|
|
d79d5feacc | ||
|
|
5cc78ef9d5 | ||
|
|
8639cd5a72 | ||
|
|
021ddc17e7 | ||
|
|
ee47b8d004 | ||
|
|
55d267c935 | ||
|
|
0fd0b9128d | ||
|
|
d9cf505b50 | ||
|
|
6079332dda | ||
|
|
929ec20365 | ||
|
|
d0cad3055f | ||
|
|
4974a13bc0 | ||
|
|
bd048df225 | ||
|
|
ed83cbd574 | ||
|
|
7230207853 | ||
|
|
1ead8a72ab | ||
|
|
36a2e9d931 | ||
|
|
0f147a5518 | ||
|
|
fce511a18b | ||
|
|
64bb61b009 | ||
|
|
c6eefec5ce | ||
|
|
4c6f829c92 | ||
|
|
8c5cdd2acb | ||
|
|
e5357599c4 | ||
|
|
3800f19966 | ||
|
|
7336f84a4b | ||
|
|
7bf4a5b2b5 | ||
|
|
43a7b97218 | ||
|
|
9f95c57a09 | ||
|
|
8f6056ae66 | ||
|
|
9bcac6b10e | ||
|
|
86318e1b7d | ||
|
|
a8a1458833 | ||
|
|
942c1e2dfe | ||
|
|
a6b6814eae | ||
|
|
0af95aa9b1 | ||
|
|
b4b9256867 | ||
|
|
a6f1281a98 | ||
|
|
b54480928a | ||
|
|
741c74e267 | ||
|
|
3061dba5ed | ||
|
|
09f5f4027e | ||
|
|
925695fd56 | ||
|
|
3c758be856 | ||
|
|
569b76a7e3 | ||
|
|
dca69eff9c | ||
|
|
6b8fedc675 | ||
|
|
c42a379e7c | ||
|
|
a40858adbf | ||
|
|
19bc720bc9 | ||
|
|
bf79ca30bb | ||
|
|
75454895e5 | ||
|
|
c81aa2d6fe | ||
|
|
376f6369b8 | ||
|
|
b1e67a1ed3 | ||
|
|
7393a30bd1 | ||
|
|
c934065f8e | ||
|
|
56124d2b50 | ||
|
|
e8a003ff8a | ||
|
|
4c789225b2 | ||
|
|
59dcdd5393 | ||
|
|
b28316a4f2 |
@@ -1,11 +1,11 @@
|
||||
# pulls community scripts from git repo
|
||||
FROM python:3.11.2-slim AS GET_SCRIPTS_STAGE
|
||||
FROM python:3.11.8-slim AS GET_SCRIPTS_STAGE
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends git && \
|
||||
git clone https://github.com/amidaware/community-scripts.git /community-scripts
|
||||
|
||||
FROM python:3.11.2-slim
|
||||
FROM python:3.11.8-slim
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
@@ -18,7 +18,7 @@ ENV PYTHONUNBUFFERED=1
|
||||
EXPOSE 8000 8383 8005
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential
|
||||
apt-get install -y build-essential weasyprint
|
||||
|
||||
RUN groupadd -g 1000 tactical && \
|
||||
useradd -u 1000 -g 1000 tactical
|
||||
@@ -27,7 +27,7 @@ RUN groupadd -g 1000 tactical && \
|
||||
COPY --from=GET_SCRIPTS_STAGE /community-scripts /community-scripts
|
||||
|
||||
# Copy dev python reqs
|
||||
COPY .devcontainer/requirements.txt /
|
||||
COPY .devcontainer/requirements.txt /
|
||||
|
||||
# Copy docker entrypoint.sh
|
||||
COPY .devcontainer/entrypoint.sh /
|
||||
|
||||
@@ -216,6 +216,7 @@ services:
|
||||
- "443:4443"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
|
||||
volumes:
|
||||
tactical-data-dev: null
|
||||
|
||||
@@ -33,12 +33,12 @@ function check_tactical_ready {
|
||||
}
|
||||
|
||||
function django_setup {
|
||||
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
|
||||
until (echo >/dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &>/dev/null; do
|
||||
echo "waiting for postgresql container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
until (echo > /dev/tcp/"${MESH_SERVICE}"/4443) &> /dev/null; do
|
||||
until (echo >/dev/tcp/"${MESH_SERVICE}"/4443) &>/dev/null; do
|
||||
echo "waiting for meshcentral container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
@@ -49,8 +49,11 @@ function django_setup {
|
||||
MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)"
|
||||
|
||||
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||
|
||||
localvars="$(cat << EOF
|
||||
|
||||
BASE_DOMAIN=$(echo "import tldextract; no_fetch_extract = tldextract.TLDExtract(suffix_list_urls=()); extracted = no_fetch_extract('${API_HOST}'); print(f'{extracted.domain}.{extracted.suffix}')" | python)
|
||||
|
||||
localvars="$(
|
||||
cat <<EOF
|
||||
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||
|
||||
DEBUG = True
|
||||
@@ -64,11 +67,17 @@ KEY_FILE = '${CERT_PRIV_PATH}'
|
||||
|
||||
SCRIPTS_DIR = '/community-scripts'
|
||||
|
||||
ALLOWED_HOSTS = ['${API_HOST}', '*']
|
||||
|
||||
ADMIN_URL = 'admin/'
|
||||
|
||||
CORS_ORIGIN_ALLOW_ALL = True
|
||||
ALLOWED_HOSTS = ['${API_HOST}', '${APP_HOST}', '*']
|
||||
|
||||
CORS_ORIGIN_WHITELIST = ['https://${APP_HOST}']
|
||||
|
||||
SESSION_COOKIE_DOMAIN = '${BASE_DOMAIN}'
|
||||
CSRF_COOKIE_DOMAIN = '${BASE_DOMAIN}'
|
||||
CSRF_TRUSTED_ORIGINS = ['https://${API_HOST}', 'https://${APP_HOST}']
|
||||
|
||||
HEADLESS_FRONTEND_URLS = {'socialaccount_login_error': 'https://${APP_HOST}/account/provider/callback'}
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
@@ -78,6 +87,17 @@ DATABASES = {
|
||||
'PASSWORD': '${POSTGRES_PASS}',
|
||||
'HOST': '${POSTGRES_HOST}',
|
||||
'PORT': '${POSTGRES_PORT}',
|
||||
},
|
||||
'reporting': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': '${POSTGRES_DB}',
|
||||
'USER': 'reporting_user',
|
||||
'PASSWORD': 'read_password',
|
||||
'HOST': '${POSTGRES_HOST}',
|
||||
'PORT': '${POSTGRES_PORT}',
|
||||
'OPTIONS': {
|
||||
'options': '-c default_transaction_read_only=on'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -87,14 +107,16 @@ MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||
REDIS_HOST = '${REDIS_HOST}'
|
||||
MESH_WS_URL = '${MESH_WS_URL}'
|
||||
ADMIN_ENABLED = True
|
||||
TRMM_INSECURE = True
|
||||
EOF
|
||||
)"
|
||||
)"
|
||||
|
||||
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||
echo "${localvars}" >${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||
|
||||
# run migrations and init scripts
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py pre_update_tasks
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py generate_json_schemas
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup
|
||||
@@ -104,9 +126,8 @@ EOF
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py create_natsapi_conf
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py create_installer_user
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py post_update_tasks
|
||||
|
||||
|
||||
# create super user
|
||||
# create super user
|
||||
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
||||
}
|
||||
|
||||
@@ -120,6 +141,8 @@ if [ "$1" = 'tactical-init-dev' ]; then
|
||||
mkdir -p /meshcentral-data
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
mkdir -p ${TACTICAL_DIR}/certs
|
||||
mkdir -p ${TACTICAL_DIR}/reporting
|
||||
mkdir -p ${TACTICAL_DIR}/reporting/assets
|
||||
mkdir -p /mongo/data/db
|
||||
mkdir -p /redis/data
|
||||
touch /meshcentral-data/.initialized && chown -R 1000:1000 /meshcentral-data
|
||||
@@ -127,6 +150,7 @@ if [ "$1" = 'tactical-init-dev' ]; then
|
||||
touch ${TACTICAL_DIR}/certs/.initialized && chown -R 1000:1000 ${TACTICAL_DIR}/certs
|
||||
touch /mongo/data/db/.initialized && chown -R 1000:1000 /mongo/data/db
|
||||
touch /redis/data/.initialized && chown -R 1000:1000 /redis/data
|
||||
touch ${TACTICAL_DIR}/reporting && chown -R 1000:1000 ${TACTICAL_DIR}/reporting
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
|
||||
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/log
|
||||
touch ${TACTICAL_DIR}/api/tacticalrmm/private/log/django_debug.log
|
||||
|
||||
3
.github/ISSUE_TEMPLATE/bug_report.md
vendored
3
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -14,11 +14,12 @@ assignees: ''
|
||||
|
||||
**Installation Method:**
|
||||
- [ ] Standard
|
||||
- [ ] Standard with `--insecure` flag at install
|
||||
- [ ] Docker
|
||||
|
||||
**Agent Info (please complete the following information):**
|
||||
- Agent version (as shown in the 'Summary' tab of the agent from web UI):
|
||||
- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]
|
||||
- Agent OS: [e.g. Win 10 v2004, Server 2016]
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
6
.github/workflows/ci-tests.yml
vendored
6
.github/workflows/ci-tests.yml
vendored
@@ -14,14 +14,14 @@ jobs:
|
||||
name: Tests
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.11.2"]
|
||||
python-version: ["3.11.8"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: harmon758/postgresql-action@v1
|
||||
with:
|
||||
postgresql version: "14"
|
||||
postgresql version: "15"
|
||||
postgresql db: "pipeline"
|
||||
postgresql user: "pipeline"
|
||||
postgresql password: "pipeline123456"
|
||||
|
||||
70
.github/workflows/codeql-analysis.yml
vendored
70
.github/workflows/codeql-analysis.yml
vendored
@@ -1,70 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ develop ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ develop ]
|
||||
schedule:
|
||||
- cron: '19 14 * * 6'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'go', 'python' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
20
.github/workflows/docker-build-push.yml
vendored
20
.github/workflows/docker-build-push.yml
vendored
@@ -9,24 +9,24 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Get Github Tag
|
||||
id: prep
|
||||
run: |
|
||||
echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
|
||||
- name: Build and Push Tactical Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
file: ./docker/containers/tactical/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical MeshCentral Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
file: ./docker/containers/tactical-meshcentral/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical NATS Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -56,7 +56,7 @@ jobs:
|
||||
file: ./docker/containers/tactical-nats/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical Frontend Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
@@ -66,7 +66,7 @@ jobs:
|
||||
file: ./docker/containers/tactical-frontend/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest
|
||||
|
||||
|
||||
- name: Build and Push Tactical Nginx Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -57,3 +57,5 @@ daphne.sock.lock
|
||||
coverage.xml
|
||||
setup_dev.yml
|
||||
11env/
|
||||
query_schema.json
|
||||
gunicorn_config.py
|
||||
37
.vscode/settings.json
vendored
37
.vscode/settings.json
vendored
@@ -1,34 +1,14 @@
|
||||
{
|
||||
"python.defaultInterpreterPath": "api/env/bin/python",
|
||||
"python.languageServer": "Pylance",
|
||||
"python.analysis.extraPaths": [
|
||||
"api/tacticalrmm",
|
||||
"api/env"
|
||||
],
|
||||
"python.analysis.extraPaths": ["api/tacticalrmm", "api/env"],
|
||||
"python.analysis.diagnosticSeverityOverrides": {
|
||||
"reportUnusedImport": "error",
|
||||
"reportDuplicateImport": "error",
|
||||
"reportGeneralTypeIssues": "none"
|
||||
"reportGeneralTypeIssues": "none",
|
||||
"reportOptionalMemberAccess": "none",
|
||||
},
|
||||
"python.analysis.typeCheckingMode": "basic",
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.mypyEnabled": true,
|
||||
"python.linting.mypyArgs": [
|
||||
"--ignore-missing-imports",
|
||||
"--follow-imports=silent",
|
||||
"--show-column-numbers",
|
||||
"--strict"
|
||||
],
|
||||
"python.linting.ignorePatterns": [
|
||||
"**/site-packages/**/*.py",
|
||||
".vscode/*.py",
|
||||
"**env/**"
|
||||
],
|
||||
"python.formatting.provider": "black",
|
||||
"mypy.targets": [
|
||||
"api/tacticalrmm"
|
||||
],
|
||||
"mypy.runUsingActiveInterpreter": true,
|
||||
"editor.bracketPairColorization.enabled": true,
|
||||
"editor.guides.bracketPairs": true,
|
||||
"editor.formatOnSave": true,
|
||||
@@ -37,7 +17,6 @@
|
||||
"**/docker/**/docker-compose*.yml": "dockercompose"
|
||||
},
|
||||
"files.watcherExclude": {
|
||||
"files.watcherExclude": {
|
||||
"**/.git/objects/**": true,
|
||||
"**/.git/subtree-cache/**": true,
|
||||
"**/node_modules/": true,
|
||||
@@ -56,23 +35,25 @@
|
||||
"**/*.parquet*": true,
|
||||
"**/*.pyc": true,
|
||||
"**/*.zip": true
|
||||
}
|
||||
},
|
||||
"go.useLanguageServer": true,
|
||||
"[go]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": false
|
||||
"source.organizeImports": "never"
|
||||
},
|
||||
"editor.snippetSuggestions": "none"
|
||||
},
|
||||
"[go.mod]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
"source.organizeImports": "explicit"
|
||||
}
|
||||
},
|
||||
"gopls": {
|
||||
"usePlaceholders": true,
|
||||
"completeUnimported": true,
|
||||
"staticcheck": true
|
||||
},
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "ms-python.black-formatter"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ Tactical RMM is a remote monitoring & management tool, built with Django and Vue
|
||||
It uses an [agent](https://github.com/amidaware/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
|
||||
# [LIVE DEMO](https://demo.tacticalrmm.com/)
|
||||
|
||||
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
|
||||
|
||||
### [Discord Chat](https://discord.gg/upGTkWp)
|
||||
@@ -19,11 +20,11 @@ Demo database resets every hour. A lot of features are disabled for obvious reas
|
||||
- Teamviewer-like remote desktop control
|
||||
- Real-time remote shell
|
||||
- Remote file browser (download and upload files)
|
||||
- Remote command and script execution (batch, powershell and python scripts)
|
||||
- Remote command and script execution (batch, powershell, python, nushell and deno scripts)
|
||||
- Event log viewer
|
||||
- Services management
|
||||
- Windows patch management
|
||||
- Automated checks with email/SMS alerting (cpu, disk, memory, services, scripts, event logs)
|
||||
- Automated checks with email/SMS/Webhook alerting (cpu, disk, memory, services, scripts, event logs)
|
||||
- Automated task runner (run scripts on a schedule)
|
||||
- Remote software installation via chocolatey
|
||||
- Software and hardware inventory
|
||||
@@ -33,10 +34,12 @@ Demo database resets every hour. A lot of features are disabled for obvious reas
|
||||
- Windows 7, 8.1, 10, 11, Server 2008R2, 2012R2, 2016, 2019, 2022
|
||||
|
||||
## Linux agent versions supported
|
||||
|
||||
- Any distro with systemd which includes but is not limited to: Debian (10, 11), Ubuntu x86_64 (18.04, 20.04, 22.04), Synology 7, centos, freepbx and more!
|
||||
|
||||
## Mac agent versions supported
|
||||
- 64 bit Intel and Apple Silicon (M1, M2)
|
||||
|
||||
- 64 bit Intel and Apple Silicon (M-Series)
|
||||
|
||||
## Installation / Backup / Restore / Usage
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
user: "tactical"
|
||||
python_ver: "3.11.2"
|
||||
go_ver: "1.19.7"
|
||||
python_ver: "3.11.8"
|
||||
go_ver: "1.20.7"
|
||||
backend_repo: "https://github.com/amidaware/tacticalrmm.git"
|
||||
frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"
|
||||
scripts_repo: "https://github.com/amidaware/community-scripts.git"
|
||||
|
||||
@@ -13,7 +13,7 @@ http {
|
||||
server_tokens off;
|
||||
tcp_nopush on;
|
||||
types_hash_max_size 2048;
|
||||
server_names_hash_bucket_size 64;
|
||||
server_names_hash_bucket_size 256;
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
deb https://nginx.org/packages/debian/ bullseye nginx
|
||||
deb-src https://nginx.org/packages/debian/ bullseye nginx
|
||||
@@ -1,4 +1,13 @@
|
||||
---
|
||||
- name: Append subdomains to hosts
|
||||
tags: hosts
|
||||
become: yes
|
||||
ansible.builtin.lineinfile:
|
||||
path: /etc/hosts
|
||||
backrefs: yes
|
||||
regexp: '^(127\.0\.1\.1 .*)$'
|
||||
line: "\\1 {{ api }} {{ mesh }} {{ rmm }}"
|
||||
|
||||
- name: set mouse mode for vim
|
||||
tags: vim
|
||||
become: yes
|
||||
@@ -32,11 +41,15 @@
|
||||
with_items:
|
||||
- "{{ base_pkgs }}"
|
||||
|
||||
- name: set arch fact
|
||||
ansible.builtin.set_fact:
|
||||
goarch: "{{ 'amd64' if ansible_architecture == 'x86_64' else 'arm64' }}"
|
||||
|
||||
- name: download and install golang
|
||||
tags: golang
|
||||
become: yes
|
||||
ansible.builtin.unarchive:
|
||||
src: "https://go.dev/dl/go{{ go_ver }}.linux-amd64.tar.gz"
|
||||
src: "https://go.dev/dl/go{{ go_ver }}.linux-{{ goarch }}.tar.gz"
|
||||
dest: /usr/local
|
||||
remote_src: yes
|
||||
|
||||
@@ -102,7 +115,7 @@
|
||||
tags: postgres
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
content: "deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main"
|
||||
content: "deb http://apt.postgresql.org/pub/repos/apt {{ ansible_distribution_release }}-pgdg main"
|
||||
dest: /etc/apt/sources.list.d/pgdg.list
|
||||
owner: root
|
||||
group: root
|
||||
@@ -119,7 +132,7 @@
|
||||
tags: postgres
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: postgresql-14
|
||||
pkg: postgresql-15
|
||||
state: present
|
||||
update_cache: yes
|
||||
|
||||
@@ -131,7 +144,7 @@
|
||||
enabled: yes
|
||||
state: started
|
||||
|
||||
- name: setup database
|
||||
- name: setup trmm database
|
||||
tags: postgres
|
||||
become: yes
|
||||
become_user: postgres
|
||||
@@ -144,6 +157,23 @@
|
||||
psql -c "ALTER ROLE {{ db_user }} SET timezone TO 'UTC'"
|
||||
psql -c "ALTER ROLE {{ db_user }} CREATEDB"
|
||||
psql -c "GRANT ALL PRIVILEGES ON DATABASE tacticalrmm TO {{ db_user }}"
|
||||
psql -c "ALTER DATABASE tacticalrmm OWNER TO {{ db_user }}"
|
||||
psql -c "GRANT USAGE, CREATE ON SCHEMA PUBLIC TO {{ db_user }}"
|
||||
|
||||
- name: setup mesh database
|
||||
tags: postgres
|
||||
become: yes
|
||||
become_user: postgres
|
||||
ansible.builtin.shell:
|
||||
cmd: |
|
||||
psql -c "CREATE DATABASE meshcentral"
|
||||
psql -c "CREATE USER {{ mesh_db_user }} WITH PASSWORD '{{ mesh_db_passwd }}'"
|
||||
psql -c "ALTER ROLE {{ mesh_db_user }} SET client_encoding TO 'utf8'"
|
||||
psql -c "ALTER ROLE {{ mesh_db_user }} SET default_transaction_isolation TO 'read committed'"
|
||||
psql -c "ALTER ROLE {{ mesh_db_user }} SET timezone TO 'UTC'"
|
||||
psql -c "GRANT ALL PRIVILEGES ON DATABASE meshcentral TO {{ mesh_db_user }}"
|
||||
psql -c "ALTER DATABASE meshcentral OWNER TO {{ mesh_db_user }}"
|
||||
psql -c "GRANT USAGE, CREATE ON SCHEMA PUBLIC TO {{ mesh_db_user }}"
|
||||
|
||||
- name: create repo dirs
|
||||
become: yes
|
||||
@@ -193,7 +223,7 @@
|
||||
- name: download and extract nats
|
||||
tags: nats
|
||||
ansible.builtin.unarchive:
|
||||
src: "https://github.com/nats-io/nats-server/releases/download/v{{ nats_server_ver.stdout }}/nats-server-v{{ nats_server_ver.stdout }}-linux-amd64.tar.gz"
|
||||
src: "https://github.com/nats-io/nats-server/releases/download/v{{ nats_server_ver.stdout }}/nats-server-v{{ nats_server_ver.stdout }}-linux-{{ goarch }}.tar.gz"
|
||||
dest: "{{ nats_tmp.path }}"
|
||||
remote_src: yes
|
||||
|
||||
@@ -202,7 +232,7 @@
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
remote_src: yes
|
||||
src: "{{ nats_tmp.path }}/nats-server-v{{ nats_server_ver.stdout }}-linux-amd64/nats-server"
|
||||
src: "{{ nats_tmp.path }}/nats-server-v{{ nats_server_ver.stdout }}-linux-{{ goarch }}/nats-server"
|
||||
dest: /usr/local/bin/nats-server
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
@@ -218,7 +248,7 @@
|
||||
- name: download nodejs setup
|
||||
tags: nodejs
|
||||
ansible.builtin.get_url:
|
||||
url: https://deb.nodesource.com/setup_16.x
|
||||
url: https://deb.nodesource.com/setup_18.x
|
||||
dest: "{{ nodejs_tmp.path }}/setup_node.sh"
|
||||
mode: "0755"
|
||||
|
||||
@@ -299,14 +329,14 @@
|
||||
tags: nginx
|
||||
become: yes
|
||||
ansible.builtin.apt_key:
|
||||
url: https://nginx.org/packages/keys/nginx_signing.key
|
||||
url: https://nginx.org/keys/nginx_signing.key
|
||||
state: present
|
||||
|
||||
- name: add nginx repo
|
||||
tags: nginx
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
src: nginx.repo
|
||||
ansible.builtin.template:
|
||||
src: nginx.repo.j2
|
||||
dest: /etc/apt/sources.list.d/nginx.list
|
||||
owner: "root"
|
||||
group: "root"
|
||||
@@ -382,12 +412,16 @@
|
||||
enabled: yes
|
||||
state: restarted
|
||||
|
||||
- name: set natsapi fact
|
||||
ansible.builtin.set_fact:
|
||||
natsapi: "{{ 'nats-api' if ansible_architecture == 'x86_64' else 'nats-api-arm64' }}"
|
||||
|
||||
- name: copy nats-api bin
|
||||
tags: nats-api
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
remote_src: yes
|
||||
src: "{{ backend_dir }}/natsapi/bin/nats-api"
|
||||
src: "{{ backend_dir }}/natsapi/bin/{{ natsapi }}"
|
||||
dest: /usr/local/bin/nats-api
|
||||
owner: "{{ user }}"
|
||||
group: "{{ user }}"
|
||||
@@ -473,39 +507,6 @@
|
||||
- { src: nats-server.systemd.j2, dest: /etc/systemd/system/nats.service }
|
||||
- { src: mesh.systemd.j2, dest: /etc/systemd/system/meshcentral.service }
|
||||
|
||||
- name: import mongodb repo signing key
|
||||
tags: mongo
|
||||
become: yes
|
||||
ansible.builtin.apt_key:
|
||||
url: https://www.mongodb.org/static/pgp/server-4.4.asc
|
||||
state: present
|
||||
|
||||
- name: setup mongodb repo
|
||||
tags: mongo
|
||||
become: yes
|
||||
ansible.builtin.copy:
|
||||
content: "deb https://repo.mongodb.org/apt/debian buster/mongodb-org/4.4 main"
|
||||
dest: /etc/apt/sources.list.d/mongodb-org-4.4.list
|
||||
owner: root
|
||||
group: root
|
||||
mode: "0644"
|
||||
|
||||
- name: install mongodb
|
||||
tags: mongo
|
||||
become: yes
|
||||
ansible.builtin.apt:
|
||||
pkg: mongodb-org
|
||||
state: present
|
||||
update_cache: yes
|
||||
|
||||
- name: ensure mongodb enabled and started
|
||||
tags: mongo
|
||||
become: yes
|
||||
ansible.builtin.service:
|
||||
name: mongod
|
||||
enabled: yes
|
||||
state: started
|
||||
|
||||
- name: get mesh_ver
|
||||
tags: mesh
|
||||
ansible.builtin.shell: grep "^MESH_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
|
||||
|
||||
@@ -2,10 +2,6 @@ SECRET_KEY = "{{ django_secret }}"
|
||||
DEBUG = True
|
||||
ALLOWED_HOSTS = ['{{ api }}']
|
||||
ADMIN_URL = "admin/"
|
||||
CORS_ORIGIN_WHITELIST = [
|
||||
"http://{{ rmm }}:8080",
|
||||
"https://{{ rmm }}:8080",
|
||||
]
|
||||
CORS_ORIGIN_ALLOW_ALL = True
|
||||
DATABASES = {
|
||||
'default': {
|
||||
@@ -17,9 +13,8 @@ DATABASES = {
|
||||
'PORT': '5432',
|
||||
}
|
||||
}
|
||||
REDIS_HOST = "localhost"
|
||||
ADMIN_ENABLED = True
|
||||
CERT_FILE = "{{ fullchain_src }}"
|
||||
KEY_FILE = "{{ privkey_src }}"
|
||||
CERT_FILE = "{{ fullchain_dest }}"
|
||||
KEY_FILE = "{{ privkey_dest }}"
|
||||
MESH_USERNAME = "{{ mesh_user }}"
|
||||
MESH_SITE = "https://{{ mesh }}"
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
{
|
||||
"settings": {
|
||||
"Cert": "{{ mesh }}",
|
||||
"MongoDb": "mongodb://127.0.0.1:27017",
|
||||
"MongoDbName": "meshcentral",
|
||||
"WANonly": true,
|
||||
"Minify": 1,
|
||||
"Port": 4430,
|
||||
@@ -10,19 +8,25 @@
|
||||
"RedirPort": 800,
|
||||
"AllowLoginToken": true,
|
||||
"AllowFraming": true,
|
||||
"AgentPong": 300,
|
||||
"AgentPing": 35,
|
||||
"AllowHighQualityDesktop": true,
|
||||
"TlsOffload": "127.0.0.1",
|
||||
"agentCoreDump": false,
|
||||
"Compression": true,
|
||||
"WsCompression": true,
|
||||
"AgentWsCompression": true,
|
||||
"MaxInvalidLogin": { "time": 5, "count": 5, "coolofftime": 30 }
|
||||
"MaxInvalidLogin": { "time": 5, "count": 5, "coolofftime": 30 },
|
||||
"postgres": {
|
||||
"user": "{{ mesh_db_user }}",
|
||||
"password": "{{ mesh_db_passwd }}",
|
||||
"port": "5432",
|
||||
"host": "localhost"
|
||||
}
|
||||
},
|
||||
"domains": {
|
||||
"": {
|
||||
"Title": "Tactical RMM",
|
||||
"Title2": "Tactical RMM",
|
||||
"Title": "Tactical RMM Dev",
|
||||
"Title2": "Tactical RMM Dev",
|
||||
"NewAccounts": false,
|
||||
"CertUrl": "https://{{ mesh }}:443/",
|
||||
"GeoLocation": true,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[Unit]
|
||||
Description=MeshCentral Server
|
||||
After=network.target mongod.service nginx.service
|
||||
After=network.target postgresql.service nginx.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
|
||||
2
ansible/roles/trmm_dev/templates/nginx.repo.j2
Normal file
2
ansible/roles/trmm_dev/templates/nginx.repo.j2
Normal file
@@ -0,0 +1,2 @@
|
||||
deb https://nginx.org/packages/debian/ {{ ansible_distribution_release }} nginx
|
||||
deb-src https://nginx.org/packages/debian/ {{ ansible_distribution_release }} nginx
|
||||
@@ -1,4 +1,4 @@
|
||||
DEV_URL = "http://{{ api }}:8000"
|
||||
DEV_HOST = "{{ rmm }}"
|
||||
DEV_HOST = "0.0.0.0"
|
||||
DEV_PORT = "8080"
|
||||
USE_HTTPS = false
|
||||
@@ -13,6 +13,8 @@
|
||||
mesh_password: "changeme"
|
||||
db_user: "changeme"
|
||||
db_passwd: "changeme"
|
||||
mesh_db_user: "changeme"
|
||||
mesh_db_passwd: "changeme"
|
||||
django_secret: "changeme"
|
||||
django_user: "changeme"
|
||||
django_password: "changeme"
|
||||
|
||||
@@ -3,6 +3,7 @@ import uuid
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from accounts.models import User
|
||||
from tacticalrmm.helpers import make_random_password
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -17,7 +18,7 @@ class Command(BaseCommand):
|
||||
User.objects.create_user(
|
||||
username=uuid.uuid4().hex,
|
||||
is_installer_user=True,
|
||||
password=User.objects.make_random_password(60),
|
||||
password=make_random_password(len=60),
|
||||
block_dashboard_login=True,
|
||||
)
|
||||
self.stdout.write("Installer user has been created")
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import subprocess
|
||||
|
||||
import pyotp
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from accounts.models import User
|
||||
from tacticalrmm.helpers import get_webdomain
|
||||
from tacticalrmm.util_settings import get_webdomain
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -26,7 +27,7 @@ class Command(BaseCommand):
|
||||
user.save(update_fields=["totp_key"])
|
||||
|
||||
url = pyotp.totp.TOTP(code).provisioning_uri(
|
||||
username, issuer_name=get_webdomain()
|
||||
username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
|
||||
)
|
||||
subprocess.run(f'qr "{url}"', shell=True)
|
||||
self.stdout.write(
|
||||
|
||||
@@ -0,0 +1,25 @@
|
||||
# Generated by Django 4.2.1 on 2023-05-17 07:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0031_user_date_format"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="user",
|
||||
name="default_agent_tbl_tab",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("server", "Servers"),
|
||||
("workstation", "Workstations"),
|
||||
("mixed", "Mixed"),
|
||||
],
|
||||
default="mixed",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,32 @@
|
||||
# Generated by Django 4.2.1 on 2023-05-23 04:54
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0032_alter_user_default_agent_tbl_tab"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_info_color",
|
||||
field=models.CharField(default="info", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_negative_color",
|
||||
field=models.CharField(default="negative", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_positive_color",
|
||||
field=models.CharField(default="positive", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dash_warning_color",
|
||||
field=models.CharField(default="warning", max_length=255),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.1.9 on 2023-05-26 23:59
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0033_user_dash_info_color_user_dash_negative_color_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_send_wol",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,22 @@
|
||||
# Generated by Django 4.2.5 on 2023-10-08 22:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0034_role_can_send_wol"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_manage_reports",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_view_reports",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,16 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-09 19:57
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("accounts", "0035_role_can_manage_reports_role_can_view_reports"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="role",
|
||||
name="can_ping_agents",
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 20:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0036_remove_role_can_ping_agents"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_run_server_scripts",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_use_webterm",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.16 on 2024-10-06 05:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0037_role_can_run_server_scripts_role_can_use_webterm"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_edit_global_keystore",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="can_view_global_keystore",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,6 @@
|
||||
from typing import Optional
|
||||
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
@@ -31,7 +32,7 @@ class User(AbstractUser, BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
default_agent_tbl_tab = models.CharField(
|
||||
max_length=50, choices=AgentTableTabs.choices, default=AgentTableTabs.SERVER
|
||||
max_length=50, choices=AgentTableTabs.choices, default=AgentTableTabs.MIXED
|
||||
)
|
||||
agents_per_page = models.PositiveIntegerField(default=50) # not currently used
|
||||
client_tree_sort = models.CharField(
|
||||
@@ -39,6 +40,10 @@ class User(AbstractUser, BaseAuditModel):
|
||||
)
|
||||
client_tree_splitter = models.PositiveIntegerField(default=11)
|
||||
loading_bar_color = models.CharField(max_length=255, default="red")
|
||||
dash_info_color = models.CharField(max_length=255, default="info")
|
||||
dash_positive_color = models.CharField(max_length=255, default="positive")
|
||||
dash_negative_color = models.CharField(max_length=255, default="negative")
|
||||
dash_warning_color = models.CharField(max_length=255, default="warning")
|
||||
clear_search_when_switching = models.BooleanField(default=True)
|
||||
date_format = models.CharField(max_length=30, blank=True, null=True)
|
||||
is_installer_user = models.BooleanField(default=False)
|
||||
@@ -60,6 +65,19 @@ class User(AbstractUser, BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
@property
|
||||
def mesh_user_id(self):
|
||||
return f"user//{self.mesh_username}"
|
||||
|
||||
@property
|
||||
def mesh_username(self):
|
||||
# lower() needed for mesh api
|
||||
return f"{self.username.replace(' ', '').lower()}___{self.pk}"
|
||||
|
||||
@property
|
||||
def is_sso_user(self):
|
||||
return SocialAccount.objects.filter(user_id=self.pk).exists()
|
||||
|
||||
@staticmethod
|
||||
def serialize(user):
|
||||
# serializes the task and returns json
|
||||
@@ -91,7 +109,6 @@ class Role(BaseAuditModel):
|
||||
|
||||
# agents
|
||||
can_list_agents = models.BooleanField(default=False)
|
||||
can_ping_agents = models.BooleanField(default=False)
|
||||
can_use_mesh = models.BooleanField(default=False)
|
||||
can_uninstall_agents = models.BooleanField(default=False)
|
||||
can_update_agents = models.BooleanField(default=False)
|
||||
@@ -105,6 +122,7 @@ class Role(BaseAuditModel):
|
||||
can_run_bulk = models.BooleanField(default=False)
|
||||
can_recover_agents = models.BooleanField(default=False)
|
||||
can_list_agent_history = models.BooleanField(default=False)
|
||||
can_send_wol = models.BooleanField(default=False)
|
||||
|
||||
# core
|
||||
can_list_notes = models.BooleanField(default=False)
|
||||
@@ -116,6 +134,10 @@ class Role(BaseAuditModel):
|
||||
can_run_urlactions = models.BooleanField(default=False)
|
||||
can_view_customfields = models.BooleanField(default=False)
|
||||
can_manage_customfields = models.BooleanField(default=False)
|
||||
can_run_server_scripts = models.BooleanField(default=False)
|
||||
can_use_webterm = models.BooleanField(default=False)
|
||||
can_view_global_keystore = models.BooleanField(default=False)
|
||||
can_edit_global_keystore = models.BooleanField(default=False)
|
||||
|
||||
# checks
|
||||
can_list_checks = models.BooleanField(default=False)
|
||||
@@ -181,13 +203,17 @@ class Role(BaseAuditModel):
|
||||
can_list_api_keys = models.BooleanField(default=False)
|
||||
can_manage_api_keys = models.BooleanField(default=False)
|
||||
|
||||
# reporting
|
||||
can_view_reports = models.BooleanField(default=False)
|
||||
can_manage_reports = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def save(self, *args, **kwargs) -> None:
|
||||
# delete cache on save
|
||||
cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def serialize(role):
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from rest_framework import permissions
|
||||
|
||||
from tacticalrmm.permissions import _has_perm
|
||||
from tacticalrmm.utils import get_core_settings
|
||||
|
||||
|
||||
class AccountsPerms(permissions.BasePermission):
|
||||
@@ -40,3 +41,14 @@ class APIKeyPerms(permissions.BasePermission):
|
||||
return _has_perm(r, "can_list_api_keys")
|
||||
|
||||
return _has_perm(r, "can_manage_api_keys")
|
||||
|
||||
|
||||
class LocalUserPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
settings = get_core_settings()
|
||||
return not settings.block_local_user_logon
|
||||
|
||||
|
||||
class SelfResetSSOPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return not r.user.is_sso_user
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
import pyotp
|
||||
from django.conf import settings
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
|
||||
from tacticalrmm.util_settings import get_webdomain
|
||||
|
||||
from .models import APIKey, Role, User
|
||||
|
||||
|
||||
@@ -20,6 +23,10 @@ class UserUISerializer(ModelSerializer):
|
||||
"client_tree_sort",
|
||||
"client_tree_splitter",
|
||||
"loading_bar_color",
|
||||
"dash_info_color",
|
||||
"dash_positive_color",
|
||||
"dash_negative_color",
|
||||
"dash_warning_color",
|
||||
"clear_search_when_switching",
|
||||
"block_dashboard_login",
|
||||
"date_format",
|
||||
@@ -57,7 +64,7 @@ class TOTPSetupSerializer(ModelSerializer):
|
||||
|
||||
def get_qr_url(self, obj):
|
||||
return pyotp.totp.TOTP(obj.totp_key).provisioning_uri(
|
||||
obj.username, issuer_name="Tactical RMM"
|
||||
obj.username, issuer_name=get_webdomain(settings.CORS_ORIGIN_WHITELIST[0])
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -11,19 +11,20 @@ from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
class TestAccounts(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.setup_client()
|
||||
self.bob = User(username="bob")
|
||||
self.bob.set_password("hunter2")
|
||||
self.bob.save()
|
||||
|
||||
def test_check_creds(self):
|
||||
url = "/checkcreds/"
|
||||
url = "/v2/checkcreds/"
|
||||
|
||||
data = {"username": "bob", "password": "hunter2"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn("totp", r.data.keys())
|
||||
self.assertEqual(r.data["totp"], "totp not set")
|
||||
self.assertEqual(r.data["totp"], False)
|
||||
|
||||
data = {"username": "bob", "password": "a3asdsa2314"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -40,7 +41,7 @@ class TestAccounts(TacticalTestCase):
|
||||
data = {"username": "bob", "password": "hunter2"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "ok")
|
||||
self.assertEqual(r.data["totp"], True)
|
||||
|
||||
# test user set to block dashboard logins
|
||||
self.bob.block_dashboard_login = True
|
||||
@@ -50,7 +51,7 @@ class TestAccounts(TacticalTestCase):
|
||||
|
||||
@patch("pyotp.TOTP.verify")
|
||||
def test_login_view(self, mock_verify):
|
||||
url = "/login/"
|
||||
url = "/v2/login/"
|
||||
|
||||
mock_verify.return_value = True
|
||||
data = {"username": "bob", "password": "hunter2", "twofactor": "123456"}
|
||||
@@ -404,7 +405,7 @@ class TestTOTPSetup(TacticalTestCase):
|
||||
|
||||
r = self.client.post(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "totp token already set")
|
||||
self.assertEqual(r.data, False)
|
||||
|
||||
|
||||
class TestAPIAuthentication(TacticalTestCase):
|
||||
|
||||
@@ -5,6 +5,10 @@ from . import views
|
||||
urlpatterns = [
|
||||
path("users/", views.GetAddUsers.as_view()),
|
||||
path("<int:pk>/users/", views.GetUpdateDeleteUser.as_view()),
|
||||
path("sessions/<str:pk>/", views.DeleteActiveLoginSession.as_view()),
|
||||
path(
|
||||
"users/<int:pk>/sessions/", views.GetDeleteActiveLoginSessionsPerUser.as_view()
|
||||
),
|
||||
path("users/reset/", views.UserActions.as_view()),
|
||||
path("users/reset_totp/", views.UserActions.as_view()),
|
||||
path("users/setup_totp/", views.TOTPSetup.as_view()),
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.http import HttpRequest
|
||||
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
@@ -16,3 +18,7 @@ def is_root_user(*, request: "HttpRequest", user: "User") -> bool:
|
||||
getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
|
||||
)
|
||||
return root or demo
|
||||
|
||||
|
||||
def is_superuser(user: "User") -> bool:
|
||||
return user.role and getattr(user.role, "is_superuser")
|
||||
|
||||
@@ -1,20 +1,39 @@
|
||||
import datetime
|
||||
|
||||
import pyotp
|
||||
from allauth.socialaccount.models import SocialAccount, SocialApp
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import login
|
||||
from django.db import IntegrityError
|
||||
from django.shortcuts import get_object_or_404
|
||||
from ipware import get_client_ip
|
||||
from django.utils import timezone as djangotime
|
||||
from knox.models import AuthToken
|
||||
from knox.views import LoginView as KnoxLoginView
|
||||
from python_ipware import IpWare
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.utils import is_root_user
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from logs.models import AuditLog
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.utils import get_core_settings
|
||||
|
||||
from .models import APIKey, Role, User
|
||||
from .permissions import AccountsPerms, APIKeyPerms, RolesPerms
|
||||
from .permissions import (
|
||||
AccountsPerms,
|
||||
APIKeyPerms,
|
||||
LocalUserPerms,
|
||||
RolesPerms,
|
||||
SelfResetSSOPerms,
|
||||
)
|
||||
from .serializers import (
|
||||
APIKeySerializer,
|
||||
RoleSerializer,
|
||||
@@ -22,12 +41,15 @@ from .serializers import (
|
||||
UserSerializer,
|
||||
UserUISerializer,
|
||||
)
|
||||
from accounts.utils import is_root_user
|
||||
|
||||
|
||||
class CheckCreds(KnoxLoginView):
|
||||
class CheckCredsV2(KnoxLoginView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
# restrict time on tokens issued by this view to 3 min
|
||||
def get_token_ttl(self):
|
||||
return datetime.timedelta(seconds=180)
|
||||
|
||||
def post(self, request, format=None):
|
||||
# check credentials
|
||||
serializer = AuthTokenSerializer(data=request.data)
|
||||
@@ -39,20 +61,25 @@ class CheckCreds(KnoxLoginView):
|
||||
|
||||
user = serializer.validated_data["user"]
|
||||
|
||||
if user.block_dashboard_login:
|
||||
if user.block_dashboard_login or user.is_sso_user:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# block local logon if configured
|
||||
core_settings = get_core_settings()
|
||||
if not user.is_superuser and core_settings.block_local_user_logon:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# if totp token not set modify response to notify frontend
|
||||
if not user.totp_key:
|
||||
login(request, user)
|
||||
response = super(CheckCreds, self).post(request, format=None)
|
||||
response.data["totp"] = "totp not set"
|
||||
response = super().post(request, format=None)
|
||||
response.data["totp"] = False
|
||||
return response
|
||||
|
||||
return Response("ok")
|
||||
return Response({"totp": True})
|
||||
|
||||
|
||||
class LoginView(KnoxLoginView):
|
||||
class LoginViewV2(KnoxLoginView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def post(self, request, format=None):
|
||||
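Note on the CheckCredsV2 hunk above: the credential pre-check now reports the TOTP state as a boolean instead of the old "ok"/"totp not set" strings. A minimal caller sketch, assuming the v2 pre-check endpoint is mounted at /v2/checkcreds/ (that URL is not shown in this diff) and using the requests library against a hypothetical server:

import requests

BASE = "https://rmm.example.com"  # hypothetical server
r = requests.post(f"{BASE}/v2/checkcreds/", json={"username": "bob", "password": "hunter2"})
body = r.json()
if body.get("totp") is True:
    # user already has a TOTP key: prompt for the 6-digit code and POST to /v2/login/
    pass
else:
    # "totp" is False and the response also carries a short-lived (3 min) knox token for TOTP setup
    pass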
@@ -65,6 +92,14 @@ class LoginView(KnoxLoginView):
|
||||
if user.block_dashboard_login:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# block local logon if configured
|
||||
core_settings = get_core_settings()
|
||||
if not user.is_superuser and core_settings.block_local_user_logon:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
if user.is_sso_user:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
token = request.data["twofactor"]
|
||||
totp = pyotp.TOTP(user.totp_key)
|
||||
|
||||
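For context on the two-factor check above, a small pyotp sketch; the key and code below are generated on the fly and are not values from this diff:

import pyotp

totp_key = pyotp.random_base32()   # the kind of secret TOTPSetup stores on user.totp_key
totp = pyotp.TOTP(totp_key)
code = totp.now()                  # the 6-digit value the dashboard submits as "twofactor"
print(totp.verify(code))           # True while the code is still inside its time window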
@@ -79,14 +114,20 @@ class LoginView(KnoxLoginView):
|
||||
login(request, user)
|
||||
|
||||
# save ip information
|
||||
client_ip, _ = get_client_ip(request)
|
||||
user.last_login_ip = client_ip
|
||||
user.save()
|
||||
ipw = IpWare()
|
||||
client_ip, _ = ipw.get_client_ip(request.META)
|
||||
if client_ip:
|
||||
user.last_login_ip = str(client_ip)
|
||||
user.save()
|
||||
|
||||
AuditLog.audit_user_login_successful(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return super(LoginView, self).post(request, format=None)
|
||||
response = super().post(request, format=None)
|
||||
response.data["username"] = request.user.username
|
||||
response.data["name"] = None
|
||||
|
||||
return Response(response.data)
|
||||
else:
|
||||
AuditLog.audit_user_failed_twofactor(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
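The hunk above swaps ipware.get_client_ip for python_ipware's IpWare, which returns an ipaddress object (or None) plus a trusted-route flag, hence the str() and the None check. A standalone sketch with an invented META dict:

from python_ipware import IpWare

ipw = IpWare()
meta = {"REMOTE_ADDR": "203.0.113.7"}        # stand-in for request.META
client_ip, trusted_route = ipw.get_client_ip(meta)
if client_ip:
    print(str(client_ip))                    # "203.0.113.7"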
@@ -94,9 +135,100 @@ class LoginView(KnoxLoginView):
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
|
||||
class GetDeleteActiveLoginSessionsPerUser(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
class TokenSerializer(ModelSerializer):
|
||||
user = ReadOnlyField(source="user.username")
|
||||
|
||||
class Meta:
|
||||
model = AuthToken
|
||||
fields = (
|
||||
"digest",
|
||||
"user",
|
||||
"created",
|
||||
"expiry",
|
||||
)
|
||||
|
||||
def get(self, request, pk):
|
||||
tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
|
||||
expiry__gt=djangotime.now()
|
||||
)
|
||||
|
||||
return Response(self.TokenSerializer(tokens, many=True).data)
|
||||
|
||||
def delete(self, request, pk):
|
||||
tokens = get_object_or_404(User, pk=pk).auth_token_set.filter(
|
||||
expiry__gt=djangotime.now()
|
||||
)
|
||||
|
||||
tokens.delete()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class DeleteActiveLoginSession(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
def delete(self, request, pk):
|
||||
token = get_object_or_404(AuthToken, digest=pk)
|
||||
|
||||
token.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetAddUsers(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
class UserSerializerSSO(ModelSerializer):
|
||||
social_accounts = SerializerMethodField()
|
||||
|
||||
def get_social_accounts(self, obj):
|
||||
accounts = SocialAccount.objects.filter(user_id=obj.pk)
|
||||
|
||||
if accounts:
|
||||
social_accounts = []
|
||||
for account in accounts:
|
||||
try:
|
||||
provider_account = account.get_provider_account()
|
||||
display = provider_account.to_str()
|
||||
except SocialApp.DoesNotExist:
|
||||
display = "Orphaned Provider"
|
||||
except Exception:
|
||||
display = "Unknown"
|
||||
|
||||
social_accounts.append(
|
||||
{
|
||||
"uid": account.uid,
|
||||
"provider": account.provider,
|
||||
"display": display,
|
||||
"last_login": account.last_login,
|
||||
"date_joined": account.date_joined,
|
||||
"extra_data": account.extra_data,
|
||||
}
|
||||
)
|
||||
|
||||
return social_accounts
|
||||
|
||||
return []
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"id",
|
||||
"username",
|
||||
"first_name",
|
||||
"last_name",
|
||||
"email",
|
||||
"is_active",
|
||||
"last_login",
|
||||
"last_login_ip",
|
||||
"role",
|
||||
"block_dashboard_login",
|
||||
"date_format",
|
||||
"social_accounts",
|
||||
]
|
||||
|
||||
def get(self, request):
|
||||
search = request.GET.get("search", None)
|
||||
|
||||
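The session views added above pair with the sessions/ routes from the urls.py hunk earlier: GET on users/<pk>/sessions/ lists a user's unexpired knox tokens, DELETE on the same path revokes all of them, and DELETE on sessions/<digest>/ revokes a single one. A rough sketch with DRF's test client, assuming the accounts app is mounted under /accounts/ (the include prefix is not part of this diff):

from rest_framework.test import APIClient

client = APIClient()
# client.force_authenticate(user=some_admin)          # hypothetical authenticated admin

r = client.get("/accounts/users/1/sessions/")         # active tokens for user pk=1
for tok in r.json():
    print(tok["digest"], tok["user"], tok["expiry"])

client.delete(f"/accounts/sessions/{tok['digest']}/")  # revoke one session
client.delete("/accounts/users/1/sessions/")            # revoke everything for the user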
@@ -107,7 +239,7 @@ class GetAddUsers(APIView):
|
||||
else:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False)
|
||||
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
return Response(self.UserSerializerSSO(users, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
# add new user
|
||||
@@ -131,6 +263,7 @@ class GetAddUsers(APIView):
|
||||
user.role = role
|
||||
|
||||
user.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response(user.username)
|
||||
|
||||
|
||||
@@ -151,6 +284,7 @@ class GetUpdateDeleteUser(APIView):
|
||||
serializer = UserSerializer(instance=user, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -160,12 +294,12 @@ class GetUpdateDeleteUser(APIView):
|
||||
return notify_error("The root user cannot be deleted from the UI")
|
||||
|
||||
user.delete()
|
||||
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class UserActions(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
permission_classes = [IsAuthenticated, AccountsPerms, LocalUserPerms]
|
||||
|
||||
# reset password
|
||||
def post(self, request):
|
||||
@@ -202,7 +336,7 @@ class TOTPSetup(APIView):
|
||||
user.save(update_fields=["totp_key"])
|
||||
return Response(TOTPSetupSerializer(user).data)
|
||||
|
||||
return Response("totp token already set")
|
||||
return Response(False)
|
||||
|
||||
|
||||
class UserUI(APIView):
|
||||
@@ -241,11 +375,13 @@ class GetUpdateDeleteRole(APIView):
|
||||
serializer = RoleSerializer(instance=role, data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("Role was edited")
|
||||
|
||||
def delete(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
role.delete()
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("Role was removed")
|
||||
|
||||
|
||||
@@ -289,7 +425,7 @@ class GetUpdateDeleteAPIKey(APIView):
|
||||
|
||||
|
||||
class ResetPass(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
permission_classes = [IsAuthenticated, SelfResetSSOPerms]
|
||||
|
||||
def put(self, request):
|
||||
user = request.user
|
||||
@@ -299,7 +435,7 @@ class ResetPass(APIView):
|
||||
|
||||
|
||||
class Reset2FA(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
permission_classes = [IsAuthenticated, SelfResetSSOPerms]
|
||||
|
||||
def put(self, request):
|
||||
user = request.user
|
||||
|
||||
@@ -47,7 +47,7 @@ class SendCMD(AsyncJsonWebsocketConsumer):
|
||||
await self.send_json({"ret": ret})
|
||||
|
||||
async def disconnect(self, _):
|
||||
await self.close()
|
||||
pass
|
||||
|
||||
def _has_perm(self, perm: str) -> bool:
|
||||
if self.user.is_superuser or (
|
||||
|
||||
@@ -33,6 +33,11 @@ class Command(BaseCommand):
|
||||
type=str,
|
||||
help="Delete agents that belong to the specified client",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--hostname",
|
||||
type=str,
|
||||
help="Delete agents with hostname starting with argument",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--delete",
|
||||
action="store_true",
|
||||
@@ -44,33 +49,38 @@ class Command(BaseCommand):
|
||||
agentver = kwargs["agentver"]
|
||||
site = kwargs["site"]
|
||||
client = kwargs["client"]
|
||||
hostname = kwargs["hostname"]
|
||||
delete = kwargs["delete"]
|
||||
|
||||
if not days and not agentver and not site and not client:
|
||||
if not days and not agentver and not site and not client and not hostname:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"Must have at least one parameter: days, agentver, site, or client"
|
||||
"Must have at least one parameter: days, agentver, site, client or hostname"
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
q = Agent.objects.defer(*AGENT_DEFER)
|
||||
agents = Agent.objects.select_related("site__client").defer(*AGENT_DEFER)
|
||||
|
||||
agents = []
|
||||
if days:
|
||||
overdue = djangotime.now() - djangotime.timedelta(days=days)
|
||||
agents = [i for i in q if i.last_seen < overdue]
|
||||
|
||||
if agentver:
|
||||
agents = [i for i in q if pyver.parse(i.version) <= pyver.parse(agentver)]
|
||||
agents = agents.filter(last_seen__lt=overdue)
|
||||
|
||||
if site:
|
||||
agents = [i for i in q if i.site.name == site]
|
||||
agents = agents.filter(site__name=site)
|
||||
|
||||
if client:
|
||||
agents = [i for i in q if i.client.name == client]
|
||||
agents = agents.filter(site__client__name=client)
|
||||
|
||||
if not agents:
|
||||
if hostname:
|
||||
agents = agents.filter(hostname__istartswith=hostname)
|
||||
|
||||
if agentver:
|
||||
agents = [
|
||||
i for i in agents if pyver.parse(i.version) <= pyver.parse(agentver)
|
||||
]
|
||||
|
||||
if len(agents) == 0:
|
||||
self.stdout.write(self.style.ERROR("No agents matched"))
|
||||
return
|
||||
|
||||
|
||||
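The reworked handler above filters at the queryset level and adds --hostname. A hedged usage sketch via call_command; the management command name below is assumed, since the file path is not visible in this hunk:

from django.core.management import call_command

# preview agents for one client whose hostname starts with "WS-" (no delete flag: list only)
call_command("delete_agents", client="Acme MSP", hostname="WS-")   # command name assumed

# actually remove agents not seen in 90+ days
call_command("delete_agents", days=90, delete=True)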
@@ -0,0 +1,24 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.constants import AGENT_DEFER
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
def find_duplicates(self, lst):
|
||||
return list(set([item for item in lst if lst.count(item) > 1]))
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
for agent in Agent.objects.defer(*AGENT_DEFER).prefetch_related(
|
||||
"custom_fields__field"
|
||||
):
|
||||
if dupes := self.find_duplicates(
|
||||
[i.field.name for i in agent.custom_fields.all()]
|
||||
):
|
||||
for dupe in dupes:
|
||||
cf = list(
|
||||
agent.custom_fields.filter(field__name=dupe).order_by("id")
|
||||
)
|
||||
to_delete = cf[:-1]
|
||||
for i in to_delete:
|
||||
i.delete()
|
||||
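A small illustration of the dedupe rule in the new command above: find_duplicates keeps only field names that appear more than once, and for each duplicate every row except the newest (cf[:-1], ordered by id) is deleted. The names and ids below are invented:

def find_duplicates(lst):
    return list(set([item for item in lst if lst.count(item) > 1]))

names = ["av_status", "av_status", "backup_ok"]
print(find_duplicates(names))      # ['av_status'] -- only the duplicated field name

rows = ["id=3", "id=7", "id=12"]   # duplicate custom-field rows for one agent, ordered by id
print(rows[:-1])                   # ['id=3', 'id=7'] -- these are deleted, id=12 survives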
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 4.2.3 on 2023-07-18 01:15
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("core", "0037_coresettings_open_ai_model_and_more"),
|
||||
("agents", "0056_alter_agent_time_zone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="agentcustomfield",
|
||||
unique_together={("agent", "field")},
|
||||
),
|
||||
]
|
||||
633  api/tacticalrmm/agents/migrations/0058_alter_agent_time_zone.py  Normal file
@@ -0,0 +1,633 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-09 19:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("agents", "0057_alter_agentcustomfield_unique_together"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="agent",
|
||||
name="time_zone",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("Africa/Abidjan", "Africa/Abidjan"),
|
||||
("Africa/Accra", "Africa/Accra"),
|
||||
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
|
||||
("Africa/Algiers", "Africa/Algiers"),
|
||||
("Africa/Asmara", "Africa/Asmara"),
|
||||
("Africa/Asmera", "Africa/Asmera"),
|
||||
("Africa/Bamako", "Africa/Bamako"),
|
||||
("Africa/Bangui", "Africa/Bangui"),
|
||||
("Africa/Banjul", "Africa/Banjul"),
|
||||
("Africa/Bissau", "Africa/Bissau"),
|
||||
("Africa/Blantyre", "Africa/Blantyre"),
|
||||
("Africa/Brazzaville", "Africa/Brazzaville"),
|
||||
("Africa/Bujumbura", "Africa/Bujumbura"),
|
||||
("Africa/Cairo", "Africa/Cairo"),
|
||||
("Africa/Casablanca", "Africa/Casablanca"),
|
||||
("Africa/Ceuta", "Africa/Ceuta"),
|
||||
("Africa/Conakry", "Africa/Conakry"),
|
||||
("Africa/Dakar", "Africa/Dakar"),
|
||||
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
|
||||
("Africa/Djibouti", "Africa/Djibouti"),
|
||||
("Africa/Douala", "Africa/Douala"),
|
||||
("Africa/El_Aaiun", "Africa/El_Aaiun"),
|
||||
("Africa/Freetown", "Africa/Freetown"),
|
||||
("Africa/Gaborone", "Africa/Gaborone"),
|
||||
("Africa/Harare", "Africa/Harare"),
|
||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
||||
("Africa/Juba", "Africa/Juba"),
|
||||
("Africa/Kampala", "Africa/Kampala"),
|
||||
("Africa/Khartoum", "Africa/Khartoum"),
|
||||
("Africa/Kigali", "Africa/Kigali"),
|
||||
("Africa/Kinshasa", "Africa/Kinshasa"),
|
||||
("Africa/Lagos", "Africa/Lagos"),
|
||||
("Africa/Libreville", "Africa/Libreville"),
|
||||
("Africa/Lome", "Africa/Lome"),
|
||||
("Africa/Luanda", "Africa/Luanda"),
|
||||
("Africa/Lubumbashi", "Africa/Lubumbashi"),
|
||||
("Africa/Lusaka", "Africa/Lusaka"),
|
||||
("Africa/Malabo", "Africa/Malabo"),
|
||||
("Africa/Maputo", "Africa/Maputo"),
|
||||
("Africa/Maseru", "Africa/Maseru"),
|
||||
("Africa/Mbabane", "Africa/Mbabane"),
|
||||
("Africa/Mogadishu", "Africa/Mogadishu"),
|
||||
("Africa/Monrovia", "Africa/Monrovia"),
|
||||
("Africa/Nairobi", "Africa/Nairobi"),
|
||||
("Africa/Ndjamena", "Africa/Ndjamena"),
|
||||
("Africa/Niamey", "Africa/Niamey"),
|
||||
("Africa/Nouakchott", "Africa/Nouakchott"),
|
||||
("Africa/Ouagadougou", "Africa/Ouagadougou"),
|
||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
||||
("Africa/Sao_Tome", "Africa/Sao_Tome"),
|
||||
("Africa/Timbuktu", "Africa/Timbuktu"),
|
||||
("Africa/Tripoli", "Africa/Tripoli"),
|
||||
("Africa/Tunis", "Africa/Tunis"),
|
||||
("Africa/Windhoek", "Africa/Windhoek"),
|
||||
("America/Adak", "America/Adak"),
|
||||
("America/Anchorage", "America/Anchorage"),
|
||||
("America/Anguilla", "America/Anguilla"),
|
||||
("America/Antigua", "America/Antigua"),
|
||||
("America/Araguaina", "America/Araguaina"),
|
||||
(
|
||||
"America/Argentina/Buenos_Aires",
|
||||
"America/Argentina/Buenos_Aires",
|
||||
),
|
||||
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
|
||||
(
|
||||
"America/Argentina/ComodRivadavia",
|
||||
"America/Argentina/ComodRivadavia",
|
||||
),
|
||||
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
|
||||
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
|
||||
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
|
||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
||||
(
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
"America/Argentina/Rio_Gallegos",
|
||||
),
|
||||
("America/Argentina/Salta", "America/Argentina/Salta"),
|
||||
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
|
||||
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
|
||||
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
|
||||
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
|
||||
("America/Aruba", "America/Aruba"),
|
||||
("America/Asuncion", "America/Asuncion"),
|
||||
("America/Atikokan", "America/Atikokan"),
|
||||
("America/Atka", "America/Atka"),
|
||||
("America/Bahia", "America/Bahia"),
|
||||
("America/Bahia_Banderas", "America/Bahia_Banderas"),
|
||||
("America/Barbados", "America/Barbados"),
|
||||
("America/Belem", "America/Belem"),
|
||||
("America/Belize", "America/Belize"),
|
||||
("America/Blanc-Sablon", "America/Blanc-Sablon"),
|
||||
("America/Boa_Vista", "America/Boa_Vista"),
|
||||
("America/Bogota", "America/Bogota"),
|
||||
("America/Boise", "America/Boise"),
|
||||
("America/Buenos_Aires", "America/Buenos_Aires"),
|
||||
("America/Cambridge_Bay", "America/Cambridge_Bay"),
|
||||
("America/Campo_Grande", "America/Campo_Grande"),
|
||||
("America/Cancun", "America/Cancun"),
|
||||
("America/Caracas", "America/Caracas"),
|
||||
("America/Catamarca", "America/Catamarca"),
|
||||
("America/Cayenne", "America/Cayenne"),
|
||||
("America/Cayman", "America/Cayman"),
|
||||
("America/Chicago", "America/Chicago"),
|
||||
("America/Chihuahua", "America/Chihuahua"),
|
||||
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
|
||||
("America/Coral_Harbour", "America/Coral_Harbour"),
|
||||
("America/Cordoba", "America/Cordoba"),
|
||||
("America/Costa_Rica", "America/Costa_Rica"),
|
||||
("America/Creston", "America/Creston"),
|
||||
("America/Cuiaba", "America/Cuiaba"),
|
||||
("America/Curacao", "America/Curacao"),
|
||||
("America/Danmarkshavn", "America/Danmarkshavn"),
|
||||
("America/Dawson", "America/Dawson"),
|
||||
("America/Dawson_Creek", "America/Dawson_Creek"),
|
||||
("America/Denver", "America/Denver"),
|
||||
("America/Detroit", "America/Detroit"),
|
||||
("America/Dominica", "America/Dominica"),
|
||||
("America/Edmonton", "America/Edmonton"),
|
||||
("America/Eirunepe", "America/Eirunepe"),
|
||||
("America/El_Salvador", "America/El_Salvador"),
|
||||
("America/Ensenada", "America/Ensenada"),
|
||||
("America/Fort_Nelson", "America/Fort_Nelson"),
|
||||
("America/Fort_Wayne", "America/Fort_Wayne"),
|
||||
("America/Fortaleza", "America/Fortaleza"),
|
||||
("America/Glace_Bay", "America/Glace_Bay"),
|
||||
("America/Godthab", "America/Godthab"),
|
||||
("America/Goose_Bay", "America/Goose_Bay"),
|
||||
("America/Grand_Turk", "America/Grand_Turk"),
|
||||
("America/Grenada", "America/Grenada"),
|
||||
("America/Guadeloupe", "America/Guadeloupe"),
|
||||
("America/Guatemala", "America/Guatemala"),
|
||||
("America/Guayaquil", "America/Guayaquil"),
|
||||
("America/Guyana", "America/Guyana"),
|
||||
("America/Halifax", "America/Halifax"),
|
||||
("America/Havana", "America/Havana"),
|
||||
("America/Hermosillo", "America/Hermosillo"),
|
||||
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
|
||||
("America/Indiana/Knox", "America/Indiana/Knox"),
|
||||
("America/Indiana/Marengo", "America/Indiana/Marengo"),
|
||||
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
|
||||
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
|
||||
("America/Indiana/Vevay", "America/Indiana/Vevay"),
|
||||
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
|
||||
("America/Indiana/Winamac", "America/Indiana/Winamac"),
|
||||
("America/Indianapolis", "America/Indianapolis"),
|
||||
("America/Inuvik", "America/Inuvik"),
|
||||
("America/Iqaluit", "America/Iqaluit"),
|
||||
("America/Jamaica", "America/Jamaica"),
|
||||
("America/Jujuy", "America/Jujuy"),
|
||||
("America/Juneau", "America/Juneau"),
|
||||
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
|
||||
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
|
||||
("America/Knox_IN", "America/Knox_IN"),
|
||||
("America/Kralendijk", "America/Kralendijk"),
|
||||
("America/La_Paz", "America/La_Paz"),
|
||||
("America/Lima", "America/Lima"),
|
||||
("America/Los_Angeles", "America/Los_Angeles"),
|
||||
("America/Louisville", "America/Louisville"),
|
||||
("America/Lower_Princes", "America/Lower_Princes"),
|
||||
("America/Maceio", "America/Maceio"),
|
||||
("America/Managua", "America/Managua"),
|
||||
("America/Manaus", "America/Manaus"),
|
||||
("America/Marigot", "America/Marigot"),
|
||||
("America/Martinique", "America/Martinique"),
|
||||
("America/Matamoros", "America/Matamoros"),
|
||||
("America/Mazatlan", "America/Mazatlan"),
|
||||
("America/Mendoza", "America/Mendoza"),
|
||||
("America/Menominee", "America/Menominee"),
|
||||
("America/Merida", "America/Merida"),
|
||||
("America/Metlakatla", "America/Metlakatla"),
|
||||
("America/Mexico_City", "America/Mexico_City"),
|
||||
("America/Miquelon", "America/Miquelon"),
|
||||
("America/Moncton", "America/Moncton"),
|
||||
("America/Monterrey", "America/Monterrey"),
|
||||
("America/Montevideo", "America/Montevideo"),
|
||||
("America/Montreal", "America/Montreal"),
|
||||
("America/Montserrat", "America/Montserrat"),
|
||||
("America/Nassau", "America/Nassau"),
|
||||
("America/New_York", "America/New_York"),
|
||||
("America/Nipigon", "America/Nipigon"),
|
||||
("America/Nome", "America/Nome"),
|
||||
("America/Noronha", "America/Noronha"),
|
||||
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
|
||||
("America/North_Dakota/Center", "America/North_Dakota/Center"),
|
||||
(
|
||||
"America/North_Dakota/New_Salem",
|
||||
"America/North_Dakota/New_Salem",
|
||||
),
|
||||
("America/Nuuk", "America/Nuuk"),
|
||||
("America/Ojinaga", "America/Ojinaga"),
|
||||
("America/Panama", "America/Panama"),
|
||||
("America/Pangnirtung", "America/Pangnirtung"),
|
||||
("America/Paramaribo", "America/Paramaribo"),
|
||||
("America/Phoenix", "America/Phoenix"),
|
||||
("America/Port-au-Prince", "America/Port-au-Prince"),
|
||||
("America/Port_of_Spain", "America/Port_of_Spain"),
|
||||
("America/Porto_Acre", "America/Porto_Acre"),
|
||||
("America/Porto_Velho", "America/Porto_Velho"),
|
||||
("America/Puerto_Rico", "America/Puerto_Rico"),
|
||||
("America/Punta_Arenas", "America/Punta_Arenas"),
|
||||
("America/Rainy_River", "America/Rainy_River"),
|
||||
("America/Rankin_Inlet", "America/Rankin_Inlet"),
|
||||
("America/Recife", "America/Recife"),
|
||||
("America/Regina", "America/Regina"),
|
||||
("America/Resolute", "America/Resolute"),
|
||||
("America/Rio_Branco", "America/Rio_Branco"),
|
||||
("America/Rosario", "America/Rosario"),
|
||||
("America/Santa_Isabel", "America/Santa_Isabel"),
|
||||
("America/Santarem", "America/Santarem"),
|
||||
("America/Santiago", "America/Santiago"),
|
||||
("America/Santo_Domingo", "America/Santo_Domingo"),
|
||||
("America/Sao_Paulo", "America/Sao_Paulo"),
|
||||
("America/Scoresbysund", "America/Scoresbysund"),
|
||||
("America/Shiprock", "America/Shiprock"),
|
||||
("America/Sitka", "America/Sitka"),
|
||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
||||
("America/St_Johns", "America/St_Johns"),
|
||||
("America/St_Kitts", "America/St_Kitts"),
|
||||
("America/St_Lucia", "America/St_Lucia"),
|
||||
("America/St_Thomas", "America/St_Thomas"),
|
||||
("America/St_Vincent", "America/St_Vincent"),
|
||||
("America/Swift_Current", "America/Swift_Current"),
|
||||
("America/Tegucigalpa", "America/Tegucigalpa"),
|
||||
("America/Thule", "America/Thule"),
|
||||
("America/Thunder_Bay", "America/Thunder_Bay"),
|
||||
("America/Tijuana", "America/Tijuana"),
|
||||
("America/Toronto", "America/Toronto"),
|
||||
("America/Tortola", "America/Tortola"),
|
||||
("America/Vancouver", "America/Vancouver"),
|
||||
("America/Virgin", "America/Virgin"),
|
||||
("America/Whitehorse", "America/Whitehorse"),
|
||||
("America/Winnipeg", "America/Winnipeg"),
|
||||
("America/Yakutat", "America/Yakutat"),
|
||||
("America/Yellowknife", "America/Yellowknife"),
|
||||
("Antarctica/Casey", "Antarctica/Casey"),
|
||||
("Antarctica/Davis", "Antarctica/Davis"),
|
||||
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
|
||||
("Antarctica/Macquarie", "Antarctica/Macquarie"),
|
||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
||||
("Antarctica/McMurdo", "Antarctica/McMurdo"),
|
||||
("Antarctica/Palmer", "Antarctica/Palmer"),
|
||||
("Antarctica/Rothera", "Antarctica/Rothera"),
|
||||
("Antarctica/South_Pole", "Antarctica/South_Pole"),
|
||||
("Antarctica/Syowa", "Antarctica/Syowa"),
|
||||
("Antarctica/Troll", "Antarctica/Troll"),
|
||||
("Antarctica/Vostok", "Antarctica/Vostok"),
|
||||
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
|
||||
("Asia/Aden", "Asia/Aden"),
|
||||
("Asia/Almaty", "Asia/Almaty"),
|
||||
("Asia/Amman", "Asia/Amman"),
|
||||
("Asia/Anadyr", "Asia/Anadyr"),
|
||||
("Asia/Aqtau", "Asia/Aqtau"),
|
||||
("Asia/Aqtobe", "Asia/Aqtobe"),
|
||||
("Asia/Ashgabat", "Asia/Ashgabat"),
|
||||
("Asia/Ashkhabad", "Asia/Ashkhabad"),
|
||||
("Asia/Atyrau", "Asia/Atyrau"),
|
||||
("Asia/Baghdad", "Asia/Baghdad"),
|
||||
("Asia/Bahrain", "Asia/Bahrain"),
|
||||
("Asia/Baku", "Asia/Baku"),
|
||||
("Asia/Bangkok", "Asia/Bangkok"),
|
||||
("Asia/Barnaul", "Asia/Barnaul"),
|
||||
("Asia/Beirut", "Asia/Beirut"),
|
||||
("Asia/Bishkek", "Asia/Bishkek"),
|
||||
("Asia/Brunei", "Asia/Brunei"),
|
||||
("Asia/Calcutta", "Asia/Calcutta"),
|
||||
("Asia/Chita", "Asia/Chita"),
|
||||
("Asia/Choibalsan", "Asia/Choibalsan"),
|
||||
("Asia/Chongqing", "Asia/Chongqing"),
|
||||
("Asia/Chungking", "Asia/Chungking"),
|
||||
("Asia/Colombo", "Asia/Colombo"),
|
||||
("Asia/Dacca", "Asia/Dacca"),
|
||||
("Asia/Damascus", "Asia/Damascus"),
|
||||
("Asia/Dhaka", "Asia/Dhaka"),
|
||||
("Asia/Dili", "Asia/Dili"),
|
||||
("Asia/Dubai", "Asia/Dubai"),
|
||||
("Asia/Dushanbe", "Asia/Dushanbe"),
|
||||
("Asia/Famagusta", "Asia/Famagusta"),
|
||||
("Asia/Gaza", "Asia/Gaza"),
|
||||
("Asia/Harbin", "Asia/Harbin"),
|
||||
("Asia/Hebron", "Asia/Hebron"),
|
||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
||||
("Asia/Hovd", "Asia/Hovd"),
|
||||
("Asia/Irkutsk", "Asia/Irkutsk"),
|
||||
("Asia/Istanbul", "Asia/Istanbul"),
|
||||
("Asia/Jakarta", "Asia/Jakarta"),
|
||||
("Asia/Jayapura", "Asia/Jayapura"),
|
||||
("Asia/Jerusalem", "Asia/Jerusalem"),
|
||||
("Asia/Kabul", "Asia/Kabul"),
|
||||
("Asia/Kamchatka", "Asia/Kamchatka"),
|
||||
("Asia/Karachi", "Asia/Karachi"),
|
||||
("Asia/Kashgar", "Asia/Kashgar"),
|
||||
("Asia/Kathmandu", "Asia/Kathmandu"),
|
||||
("Asia/Katmandu", "Asia/Katmandu"),
|
||||
("Asia/Khandyga", "Asia/Khandyga"),
|
||||
("Asia/Kolkata", "Asia/Kolkata"),
|
||||
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
|
||||
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
|
||||
("Asia/Kuching", "Asia/Kuching"),
|
||||
("Asia/Kuwait", "Asia/Kuwait"),
|
||||
("Asia/Macao", "Asia/Macao"),
|
||||
("Asia/Macau", "Asia/Macau"),
|
||||
("Asia/Magadan", "Asia/Magadan"),
|
||||
("Asia/Makassar", "Asia/Makassar"),
|
||||
("Asia/Manila", "Asia/Manila"),
|
||||
("Asia/Muscat", "Asia/Muscat"),
|
||||
("Asia/Nicosia", "Asia/Nicosia"),
|
||||
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
|
||||
("Asia/Novosibirsk", "Asia/Novosibirsk"),
|
||||
("Asia/Omsk", "Asia/Omsk"),
|
||||
("Asia/Oral", "Asia/Oral"),
|
||||
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
|
||||
("Asia/Pontianak", "Asia/Pontianak"),
|
||||
("Asia/Pyongyang", "Asia/Pyongyang"),
|
||||
("Asia/Qatar", "Asia/Qatar"),
|
||||
("Asia/Qostanay", "Asia/Qostanay"),
|
||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
||||
("Asia/Rangoon", "Asia/Rangoon"),
|
||||
("Asia/Riyadh", "Asia/Riyadh"),
|
||||
("Asia/Saigon", "Asia/Saigon"),
|
||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
||||
("Asia/Samarkand", "Asia/Samarkand"),
|
||||
("Asia/Seoul", "Asia/Seoul"),
|
||||
("Asia/Shanghai", "Asia/Shanghai"),
|
||||
("Asia/Singapore", "Asia/Singapore"),
|
||||
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
|
||||
("Asia/Taipei", "Asia/Taipei"),
|
||||
("Asia/Tashkent", "Asia/Tashkent"),
|
||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
||||
("Asia/Tehran", "Asia/Tehran"),
|
||||
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
|
||||
("Asia/Thimbu", "Asia/Thimbu"),
|
||||
("Asia/Thimphu", "Asia/Thimphu"),
|
||||
("Asia/Tokyo", "Asia/Tokyo"),
|
||||
("Asia/Tomsk", "Asia/Tomsk"),
|
||||
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
|
||||
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
|
||||
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
|
||||
("Asia/Urumqi", "Asia/Urumqi"),
|
||||
("Asia/Ust-Nera", "Asia/Ust-Nera"),
|
||||
("Asia/Vientiane", "Asia/Vientiane"),
|
||||
("Asia/Vladivostok", "Asia/Vladivostok"),
|
||||
("Asia/Yakutsk", "Asia/Yakutsk"),
|
||||
("Asia/Yangon", "Asia/Yangon"),
|
||||
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
|
||||
("Asia/Yerevan", "Asia/Yerevan"),
|
||||
("Atlantic/Azores", "Atlantic/Azores"),
|
||||
("Atlantic/Bermuda", "Atlantic/Bermuda"),
|
||||
("Atlantic/Canary", "Atlantic/Canary"),
|
||||
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
|
||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
||||
("Atlantic/Faroe", "Atlantic/Faroe"),
|
||||
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
|
||||
("Atlantic/Madeira", "Atlantic/Madeira"),
|
||||
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
|
||||
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
|
||||
("Atlantic/St_Helena", "Atlantic/St_Helena"),
|
||||
("Atlantic/Stanley", "Atlantic/Stanley"),
|
||||
("Australia/ACT", "Australia/ACT"),
|
||||
("Australia/Adelaide", "Australia/Adelaide"),
|
||||
("Australia/Brisbane", "Australia/Brisbane"),
|
||||
("Australia/Broken_Hill", "Australia/Broken_Hill"),
|
||||
("Australia/Canberra", "Australia/Canberra"),
|
||||
("Australia/Currie", "Australia/Currie"),
|
||||
("Australia/Darwin", "Australia/Darwin"),
|
||||
("Australia/Eucla", "Australia/Eucla"),
|
||||
("Australia/Hobart", "Australia/Hobart"),
|
||||
("Australia/LHI", "Australia/LHI"),
|
||||
("Australia/Lindeman", "Australia/Lindeman"),
|
||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
||||
("Australia/Melbourne", "Australia/Melbourne"),
|
||||
("Australia/NSW", "Australia/NSW"),
|
||||
("Australia/North", "Australia/North"),
|
||||
("Australia/Perth", "Australia/Perth"),
|
||||
("Australia/Queensland", "Australia/Queensland"),
|
||||
("Australia/South", "Australia/South"),
|
||||
("Australia/Sydney", "Australia/Sydney"),
|
||||
("Australia/Tasmania", "Australia/Tasmania"),
|
||||
("Australia/Victoria", "Australia/Victoria"),
|
||||
("Australia/West", "Australia/West"),
|
||||
("Australia/Yancowinna", "Australia/Yancowinna"),
|
||||
("Brazil/Acre", "Brazil/Acre"),
|
||||
("Brazil/DeNoronha", "Brazil/DeNoronha"),
|
||||
("Brazil/East", "Brazil/East"),
|
||||
("Brazil/West", "Brazil/West"),
|
||||
("CET", "CET"),
|
||||
("CST6CDT", "CST6CDT"),
|
||||
("Canada/Atlantic", "Canada/Atlantic"),
|
||||
("Canada/Central", "Canada/Central"),
|
||||
("Canada/Eastern", "Canada/Eastern"),
|
||||
("Canada/Mountain", "Canada/Mountain"),
|
||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
||||
("Canada/Pacific", "Canada/Pacific"),
|
||||
("Canada/Saskatchewan", "Canada/Saskatchewan"),
|
||||
("Canada/Yukon", "Canada/Yukon"),
|
||||
("Chile/Continental", "Chile/Continental"),
|
||||
("Chile/EasterIsland", "Chile/EasterIsland"),
|
||||
("Cuba", "Cuba"),
|
||||
("EET", "EET"),
|
||||
("EST", "EST"),
|
||||
("EST5EDT", "EST5EDT"),
|
||||
("Egypt", "Egypt"),
|
||||
("Eire", "Eire"),
|
||||
("Etc/GMT", "Etc/GMT"),
|
||||
("Etc/GMT+0", "Etc/GMT+0"),
|
||||
("Etc/GMT+1", "Etc/GMT+1"),
|
||||
("Etc/GMT+10", "Etc/GMT+10"),
|
||||
("Etc/GMT+11", "Etc/GMT+11"),
|
||||
("Etc/GMT+12", "Etc/GMT+12"),
|
||||
("Etc/GMT+2", "Etc/GMT+2"),
|
||||
("Etc/GMT+3", "Etc/GMT+3"),
|
||||
("Etc/GMT+4", "Etc/GMT+4"),
|
||||
("Etc/GMT+5", "Etc/GMT+5"),
|
||||
("Etc/GMT+6", "Etc/GMT+6"),
|
||||
("Etc/GMT+7", "Etc/GMT+7"),
|
||||
("Etc/GMT+8", "Etc/GMT+8"),
|
||||
("Etc/GMT+9", "Etc/GMT+9"),
|
||||
("Etc/GMT-0", "Etc/GMT-0"),
|
||||
("Etc/GMT-1", "Etc/GMT-1"),
|
||||
("Etc/GMT-10", "Etc/GMT-10"),
|
||||
("Etc/GMT-11", "Etc/GMT-11"),
|
||||
("Etc/GMT-12", "Etc/GMT-12"),
|
||||
("Etc/GMT-13", "Etc/GMT-13"),
|
||||
("Etc/GMT-14", "Etc/GMT-14"),
|
||||
("Etc/GMT-2", "Etc/GMT-2"),
|
||||
("Etc/GMT-3", "Etc/GMT-3"),
|
||||
("Etc/GMT-4", "Etc/GMT-4"),
|
||||
("Etc/GMT-5", "Etc/GMT-5"),
|
||||
("Etc/GMT-6", "Etc/GMT-6"),
|
||||
("Etc/GMT-7", "Etc/GMT-7"),
|
||||
("Etc/GMT-8", "Etc/GMT-8"),
|
||||
("Etc/GMT-9", "Etc/GMT-9"),
|
||||
("Etc/GMT0", "Etc/GMT0"),
|
||||
("Etc/Greenwich", "Etc/Greenwich"),
|
||||
("Etc/UCT", "Etc/UCT"),
|
||||
("Etc/UTC", "Etc/UTC"),
|
||||
("Etc/Universal", "Etc/Universal"),
|
||||
("Etc/Zulu", "Etc/Zulu"),
|
||||
("Europe/Amsterdam", "Europe/Amsterdam"),
|
||||
("Europe/Andorra", "Europe/Andorra"),
|
||||
("Europe/Astrakhan", "Europe/Astrakhan"),
|
||||
("Europe/Athens", "Europe/Athens"),
|
||||
("Europe/Belfast", "Europe/Belfast"),
|
||||
("Europe/Belgrade", "Europe/Belgrade"),
|
||||
("Europe/Berlin", "Europe/Berlin"),
|
||||
("Europe/Bratislava", "Europe/Bratislava"),
|
||||
("Europe/Brussels", "Europe/Brussels"),
|
||||
("Europe/Bucharest", "Europe/Bucharest"),
|
||||
("Europe/Budapest", "Europe/Budapest"),
|
||||
("Europe/Busingen", "Europe/Busingen"),
|
||||
("Europe/Chisinau", "Europe/Chisinau"),
|
||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
||||
("Europe/Dublin", "Europe/Dublin"),
|
||||
("Europe/Gibraltar", "Europe/Gibraltar"),
|
||||
("Europe/Guernsey", "Europe/Guernsey"),
|
||||
("Europe/Helsinki", "Europe/Helsinki"),
|
||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
||||
("Europe/Istanbul", "Europe/Istanbul"),
|
||||
("Europe/Jersey", "Europe/Jersey"),
|
||||
("Europe/Kaliningrad", "Europe/Kaliningrad"),
|
||||
("Europe/Kiev", "Europe/Kiev"),
|
||||
("Europe/Kirov", "Europe/Kirov"),
|
||||
("Europe/Kyiv", "Europe/Kyiv"),
|
||||
("Europe/Lisbon", "Europe/Lisbon"),
|
||||
("Europe/Ljubljana", "Europe/Ljubljana"),
|
||||
("Europe/London", "Europe/London"),
|
||||
("Europe/Luxembourg", "Europe/Luxembourg"),
|
||||
("Europe/Madrid", "Europe/Madrid"),
|
||||
("Europe/Malta", "Europe/Malta"),
|
||||
("Europe/Mariehamn", "Europe/Mariehamn"),
|
||||
("Europe/Minsk", "Europe/Minsk"),
|
||||
("Europe/Monaco", "Europe/Monaco"),
|
||||
("Europe/Moscow", "Europe/Moscow"),
|
||||
("Europe/Nicosia", "Europe/Nicosia"),
|
||||
("Europe/Oslo", "Europe/Oslo"),
|
||||
("Europe/Paris", "Europe/Paris"),
|
||||
("Europe/Podgorica", "Europe/Podgorica"),
|
||||
("Europe/Prague", "Europe/Prague"),
|
||||
("Europe/Riga", "Europe/Riga"),
|
||||
("Europe/Rome", "Europe/Rome"),
|
||||
("Europe/Samara", "Europe/Samara"),
|
||||
("Europe/San_Marino", "Europe/San_Marino"),
|
||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
||||
("Europe/Saratov", "Europe/Saratov"),
|
||||
("Europe/Simferopol", "Europe/Simferopol"),
|
||||
("Europe/Skopje", "Europe/Skopje"),
|
||||
("Europe/Sofia", "Europe/Sofia"),
|
||||
("Europe/Stockholm", "Europe/Stockholm"),
|
||||
("Europe/Tallinn", "Europe/Tallinn"),
|
||||
("Europe/Tirane", "Europe/Tirane"),
|
||||
("Europe/Tiraspol", "Europe/Tiraspol"),
|
||||
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
|
||||
("Europe/Uzhgorod", "Europe/Uzhgorod"),
|
||||
("Europe/Vaduz", "Europe/Vaduz"),
|
||||
("Europe/Vatican", "Europe/Vatican"),
|
||||
("Europe/Vienna", "Europe/Vienna"),
|
||||
("Europe/Vilnius", "Europe/Vilnius"),
|
||||
("Europe/Volgograd", "Europe/Volgograd"),
|
||||
("Europe/Warsaw", "Europe/Warsaw"),
|
||||
("Europe/Zagreb", "Europe/Zagreb"),
|
||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
||||
("Europe/Zurich", "Europe/Zurich"),
|
||||
("Factory", "Factory"),
|
||||
("GB", "GB"),
|
||||
("GB-Eire", "GB-Eire"),
|
||||
("GMT", "GMT"),
|
||||
("GMT+0", "GMT+0"),
|
||||
("GMT-0", "GMT-0"),
|
||||
("GMT0", "GMT0"),
|
||||
("Greenwich", "Greenwich"),
|
||||
("HST", "HST"),
|
||||
("Hongkong", "Hongkong"),
|
||||
("Iceland", "Iceland"),
|
||||
("Indian/Antananarivo", "Indian/Antananarivo"),
|
||||
("Indian/Chagos", "Indian/Chagos"),
|
||||
("Indian/Christmas", "Indian/Christmas"),
|
||||
("Indian/Cocos", "Indian/Cocos"),
|
||||
("Indian/Comoro", "Indian/Comoro"),
|
||||
("Indian/Kerguelen", "Indian/Kerguelen"),
|
||||
("Indian/Mahe", "Indian/Mahe"),
|
||||
("Indian/Maldives", "Indian/Maldives"),
|
||||
("Indian/Mauritius", "Indian/Mauritius"),
|
||||
("Indian/Mayotte", "Indian/Mayotte"),
|
||||
("Indian/Reunion", "Indian/Reunion"),
|
||||
("Iran", "Iran"),
|
||||
("Israel", "Israel"),
|
||||
("Jamaica", "Jamaica"),
|
||||
("Japan", "Japan"),
|
||||
("Kwajalein", "Kwajalein"),
|
||||
("Libya", "Libya"),
|
||||
("MET", "MET"),
|
||||
("MST", "MST"),
|
||||
("MST7MDT", "MST7MDT"),
|
||||
("Mexico/BajaNorte", "Mexico/BajaNorte"),
|
||||
("Mexico/BajaSur", "Mexico/BajaSur"),
|
||||
("Mexico/General", "Mexico/General"),
|
||||
("NZ", "NZ"),
|
||||
("NZ-CHAT", "NZ-CHAT"),
|
||||
("Navajo", "Navajo"),
|
||||
("PRC", "PRC"),
|
||||
("PST8PDT", "PST8PDT"),
|
||||
("Pacific/Apia", "Pacific/Apia"),
|
||||
("Pacific/Auckland", "Pacific/Auckland"),
|
||||
("Pacific/Bougainville", "Pacific/Bougainville"),
|
||||
("Pacific/Chatham", "Pacific/Chatham"),
|
||||
("Pacific/Chuuk", "Pacific/Chuuk"),
|
||||
("Pacific/Easter", "Pacific/Easter"),
|
||||
("Pacific/Efate", "Pacific/Efate"),
|
||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
||||
("Pacific/Fakaofo", "Pacific/Fakaofo"),
|
||||
("Pacific/Fiji", "Pacific/Fiji"),
|
||||
("Pacific/Funafuti", "Pacific/Funafuti"),
|
||||
("Pacific/Galapagos", "Pacific/Galapagos"),
|
||||
("Pacific/Gambier", "Pacific/Gambier"),
|
||||
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
|
||||
("Pacific/Guam", "Pacific/Guam"),
|
||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
||||
("Pacific/Johnston", "Pacific/Johnston"),
|
||||
("Pacific/Kanton", "Pacific/Kanton"),
|
||||
("Pacific/Kiritimati", "Pacific/Kiritimati"),
|
||||
("Pacific/Kosrae", "Pacific/Kosrae"),
|
||||
("Pacific/Kwajalein", "Pacific/Kwajalein"),
|
||||
("Pacific/Majuro", "Pacific/Majuro"),
|
||||
("Pacific/Marquesas", "Pacific/Marquesas"),
|
||||
("Pacific/Midway", "Pacific/Midway"),
|
||||
("Pacific/Nauru", "Pacific/Nauru"),
|
||||
("Pacific/Niue", "Pacific/Niue"),
|
||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
||||
("Pacific/Noumea", "Pacific/Noumea"),
|
||||
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
|
||||
("Pacific/Palau", "Pacific/Palau"),
|
||||
("Pacific/Pitcairn", "Pacific/Pitcairn"),
|
||||
("Pacific/Pohnpei", "Pacific/Pohnpei"),
|
||||
("Pacific/Ponape", "Pacific/Ponape"),
|
||||
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
|
||||
("Pacific/Rarotonga", "Pacific/Rarotonga"),
|
||||
("Pacific/Saipan", "Pacific/Saipan"),
|
||||
("Pacific/Samoa", "Pacific/Samoa"),
|
||||
("Pacific/Tahiti", "Pacific/Tahiti"),
|
||||
("Pacific/Tarawa", "Pacific/Tarawa"),
|
||||
("Pacific/Tongatapu", "Pacific/Tongatapu"),
|
||||
("Pacific/Truk", "Pacific/Truk"),
|
||||
("Pacific/Wake", "Pacific/Wake"),
|
||||
("Pacific/Wallis", "Pacific/Wallis"),
|
||||
("Pacific/Yap", "Pacific/Yap"),
|
||||
("Poland", "Poland"),
|
||||
("Portugal", "Portugal"),
|
||||
("ROC", "ROC"),
|
||||
("ROK", "ROK"),
|
||||
("Singapore", "Singapore"),
|
||||
("Turkey", "Turkey"),
|
||||
("UCT", "UCT"),
|
||||
("US/Alaska", "US/Alaska"),
|
||||
("US/Aleutian", "US/Aleutian"),
|
||||
("US/Arizona", "US/Arizona"),
|
||||
("US/Central", "US/Central"),
|
||||
("US/East-Indiana", "US/East-Indiana"),
|
||||
("US/Eastern", "US/Eastern"),
|
||||
("US/Hawaii", "US/Hawaii"),
|
||||
("US/Indiana-Starke", "US/Indiana-Starke"),
|
||||
("US/Michigan", "US/Michigan"),
|
||||
("US/Mountain", "US/Mountain"),
|
||||
("US/Pacific", "US/Pacific"),
|
||||
("US/Samoa", "US/Samoa"),
|
||||
("UTC", "UTC"),
|
||||
("Universal", "Universal"),
|
||||
("W-SU", "W-SU"),
|
||||
("WET", "WET"),
|
||||
("Zulu", "Zulu"),
|
||||
("localtime", "localtime"),
|
||||
],
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-19 05:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0058_alter_agent_time_zone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="agenthistory",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,36 @@
|
||||
# Generated by Django 4.2.16 on 2024-10-05 20:39
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0047_alter_coresettings_notify_on_warning_alerts"),
|
||||
("agents", "0059_alter_agenthistory_id"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="agenthistory",
|
||||
name="collector_all_output",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="agenthistory",
|
||||
name="custom_field",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="history",
|
||||
to="core.customfield",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="agenthistory",
|
||||
name="save_to_agent_note",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,14 +1,13 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
from collections import Counter
|
||||
from contextlib import suppress
|
||||
from distutils.version import LooseVersion
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast
|
||||
|
||||
import msgpack
|
||||
import nats
|
||||
import validators
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.cache import cache
|
||||
@@ -16,11 +15,12 @@ from django.db import models
|
||||
from django.utils import timezone as djangotime
|
||||
from nats.errors import TimeoutError
|
||||
from packaging import version as pyver
|
||||
from packaging.version import Version as LooseVersion
|
||||
|
||||
from agents.utils import get_agent_url
|
||||
from checks.models import CheckResult
|
||||
from core.models import TZ_CHOICES
|
||||
from core.utils import get_core_settings, send_command_with_mesh
|
||||
from core.utils import _b64_to_hex, get_core_settings, send_command_with_mesh
|
||||
from logs.models import BaseAuditModel, DebugLog, PendingAction
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_STATUS_OFFLINE,
|
||||
@@ -40,7 +40,7 @@ from tacticalrmm.constants import (
|
||||
PAAction,
|
||||
PAStatus,
|
||||
)
|
||||
from tacticalrmm.helpers import setup_nats_options
|
||||
from tacticalrmm.helpers import has_script_actions, has_webhook, setup_nats_options
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -54,6 +54,8 @@ if TYPE_CHECKING:
|
||||
# type helpers
|
||||
Disk = Union[Dict[str, Any], str]
|
||||
|
||||
logger = logging.getLogger("trmm")
|
||||
|
||||
|
||||
class Agent(BaseAuditModel):
|
||||
class Meta:
|
||||
@@ -124,6 +126,27 @@ class Agent(BaseAuditModel):
|
||||
def __str__(self) -> str:
|
||||
return self.hostname
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# prevent recursion since calling set_alert_template() also calls save()
|
||||
if not hasattr(self, "_processing_set_alert_template"):
|
||||
self._processing_set_alert_template = False
|
||||
|
||||
if self.pk and not self._processing_set_alert_template:
|
||||
orig = Agent.objects.get(pk=self.pk)
|
||||
mon_type_changed = self.monitoring_type != orig.monitoring_type
|
||||
site_changed = self.site_id != orig.site_id
|
||||
policy_changed = self.policy != orig.policy
|
||||
block_inherit = (
|
||||
self.block_policy_inheritance != orig.block_policy_inheritance
|
||||
)
|
||||
|
||||
if mon_type_changed or site_changed or policy_changed or block_inherit:
|
||||
self._processing_set_alert_template = True
|
||||
self.set_alert_template()
|
||||
self._processing_set_alert_template = False
|
||||
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def client(self) -> "Client":
|
||||
return self.site.client
|
||||
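The save() override above re-applies the alert template only when monitoring type, site, policy, or policy-inheritance blocking actually changed, and the _processing_set_alert_template flag keeps the nested save() issued inside set_alert_template() from recursing. A rough usage sketch; the lookup value and hostnames are invented:

agent = Agent.objects.get(agent_id="some-agent-id")   # hypothetical lookup
agent.hostname = "NEW-HOSTNAME"
agent.save()          # no relevant field changed -> set_alert_template() is not called

agent.monitoring_type = "workstation"
agent.save()          # monitoring type changed -> set_alert_template() runs exactly once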
@@ -280,7 +303,20 @@ class Agent(BaseAuditModel):
|
||||
try:
|
||||
cpus = self.wmi_detail["cpu"]
|
||||
for cpu in cpus:
|
||||
ret.append([x["Name"] for x in cpu if "Name" in x][0])
|
||||
name = [x["Name"] for x in cpu if "Name" in x][0]
|
||||
lp, nc = "", ""
|
||||
with suppress(Exception):
|
||||
lp = [
|
||||
x["NumberOfLogicalProcessors"]
|
||||
for x in cpu
|
||||
if "NumberOfCores" in x
|
||||
][0]
|
||||
nc = [x["NumberOfCores"] for x in cpu if "NumberOfCores" in x][0]
|
||||
if lp and nc:
|
||||
cpu_string = f"{name}, {nc}C/{lp}T"
|
||||
else:
|
||||
cpu_string = name
|
||||
ret.append(cpu_string)
|
||||
return ret
|
||||
except:
|
||||
return ["unknown cpu model"]
|
||||
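The cpu_model change above appends core/thread counts when both are reported. An invented entry from wmi_detail["cpu"] (the loop variable cpu is one such list of property dicts) and the string it produces:

cpu = [{"Name": "AMD Ryzen 7 5800X 8-Core Processor",
        "NumberOfCores": 8,
        "NumberOfLogicalProcessors": 16}]
# name -> "AMD Ryzen 7 5800X 8-Core Processor", nc -> 8, lp -> 16
# cpu_string == "AMD Ryzen 7 5800X 8-Core Processor, 8C/16T"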
@@ -408,6 +444,23 @@ class Agent(BaseAuditModel):
|
||||
except:
|
||||
return ["unknown disk"]
|
||||
|
||||
@property
|
||||
def serial_number(self) -> str:
|
||||
if self.is_posix:
|
||||
try:
|
||||
return self.wmi_detail["serialnumber"]
|
||||
except:
|
||||
return ""
|
||||
|
||||
try:
|
||||
return self.wmi_detail["bios"][0][0]["SerialNumber"]
|
||||
except:
|
||||
return ""
|
||||
|
||||
@property
|
||||
def hex_mesh_node_id(self) -> str:
|
||||
return _b64_to_hex(self.mesh_node_id)
|
||||
|
||||
@classmethod
|
||||
def online_agents(cls, min_version: str = "") -> "List[Agent]":
|
||||
if min_version:
|
||||
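The new serial_number property reads the BIOS serial on Windows and a flat serialnumber key on posix agents, while hex_mesh_node_id converts the stored mesh node id through the _b64_to_hex helper imported from core.utils (its body is not shown here). Invented wmi_detail shapes for the two serial branches:

# Windows agent: nested bios list
wmi_detail = {"bios": [[{"SerialNumber": "ABC1234567"}]]}
print(wmi_detail["bios"][0][0]["SerialNumber"])   # "ABC1234567"

# posix agent: flat key
wmi_detail = {"serialnumber": "C02XJ0AAJGH5"}
print(wmi_detail["serialnumber"])                 # "C02XJ0AAJGH5"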
@@ -495,24 +548,32 @@ class Agent(BaseAuditModel):
|
||||
)
|
||||
|
||||
return {
|
||||
"agent_policy": self.policy
|
||||
if self.policy and not self.policy.is_agent_excluded(self)
|
||||
else None,
|
||||
"site_policy": site_policy
|
||||
if (site_policy and not site_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
else None,
|
||||
"client_policy": client_policy
|
||||
if (client_policy and not client_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
else None,
|
||||
"default_policy": default_policy
|
||||
if (default_policy and not default_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
and not self.client.block_policy_inheritance
|
||||
else None,
|
||||
"agent_policy": (
|
||||
self.policy
|
||||
if self.policy and not self.policy.is_agent_excluded(self)
|
||||
else None
|
||||
),
|
||||
"site_policy": (
|
||||
site_policy
|
||||
if (site_policy and not site_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
else None
|
||||
),
|
||||
"client_policy": (
|
||||
client_policy
|
||||
if (client_policy and not client_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
else None
|
||||
),
|
||||
"default_policy": (
|
||||
default_policy
|
||||
if (default_policy and not default_policy.is_agent_excluded(self))
|
||||
and not self.block_policy_inheritance
|
||||
and not self.site.block_policy_inheritance
|
||||
and not self.client.block_policy_inheritance
|
||||
else None
|
||||
),
|
||||
}
|
||||
|
||||
def check_run_interval(self) -> int:
|
||||
@@ -546,6 +607,7 @@ class Agent(BaseAuditModel):
|
||||
run_as_user = True
|
||||
|
||||
parsed_args = script.parse_script_args(self, script.shell, args)
|
||||
parsed_env_vars = script.parse_script_env_vars(self, script.shell, env_vars)
|
||||
|
||||
data = {
|
||||
"func": "runscriptfull" if full else "runscript",
|
||||
@@ -556,7 +618,9 @@ class Agent(BaseAuditModel):
|
||||
"shell": script.shell,
|
||||
},
|
||||
"run_as_user": run_as_user,
|
||||
"env_vars": env_vars,
|
||||
"env_vars": parsed_env_vars,
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
|
||||
if history_pk != 0:
|
||||
@@ -787,9 +851,6 @@ class Agent(BaseAuditModel):
|
||||
cache.set(cache_key, tasks, 600)
|
||||
return tasks
|
||||
|
||||
def _do_nats_debug(self, agent: "Agent", message: str) -> None:
|
||||
DebugLog.error(agent=agent, log_type=DebugLogType.AGENT_ISSUES, message=message)
|
||||
|
||||
async def nats_cmd(
|
||||
self, data: Dict[Any, Any], timeout: int = 30, wait: bool = True
|
||||
) -> Any:
|
||||
@@ -811,9 +872,7 @@ class Agent(BaseAuditModel):
|
||||
ret = msgpack.loads(msg.data)
|
||||
except Exception as e:
|
||||
ret = str(e)
|
||||
await sync_to_async(self._do_nats_debug, thread_sensitive=False)(
|
||||
agent=self, message=ret
|
||||
)
|
||||
logger.error(e)
|
||||
|
||||
await nc.close()
|
||||
return ret
|
||||
@@ -876,8 +935,10 @@ class Agent(BaseAuditModel):
|
||||
# extract the version from the title and sort from oldest to newest
|
||||
# skip if no version info is available therefore nothing to parse
|
||||
try:
|
||||
matches = r"(Version|Versão)"
|
||||
pattern = r"\(" + matches + r"(.*?)\)"
|
||||
vers = [
|
||||
re.search(r"\(Version(.*?)\)", i).group(1).strip()
|
||||
re.search(pattern, i, flags=re.IGNORECASE).group(2).strip()
|
||||
for i in titles
|
||||
]
|
||||
sorted_vers = sorted(vers, key=LooseVersion)
|
||||
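The updated pattern above extracts the version from both English and Portuguese Windows update titles. A quick check with invented titles:

import re

matches = r"(Version|Versão)"
pattern = r"\(" + matches + r"(.*?)\)"
titles = [
    "Windows 11 Insider Preview 10.0.22631 (Version 23H2)",   # invented titles
    "Atualização do Windows 11 (Versão 22H2)",
]
vers = [re.search(pattern, t, flags=re.IGNORECASE).group(2).strip() for t in titles]
print(vers)   # ['23H2', '22H2']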
@@ -894,18 +955,22 @@ class Agent(BaseAuditModel):
|
||||
def should_create_alert(
|
||||
self, alert_template: "Optional[AlertTemplate]" = None
|
||||
) -> bool:
|
||||
return bool(
|
||||
has_agent_notification = (
|
||||
self.overdue_dashboard_alert
|
||||
or self.overdue_email_alert
|
||||
or self.overdue_text_alert
|
||||
or (
|
||||
alert_template
|
||||
and (
|
||||
alert_template.agent_always_alert
|
||||
or alert_template.agent_always_email
|
||||
or alert_template.agent_always_text
|
||||
)
|
||||
)
|
||||
)
|
||||
has_alert_template_notification = alert_template and (
|
||||
alert_template.agent_always_alert
|
||||
or alert_template.agent_always_email
|
||||
or alert_template.agent_always_text
|
||||
)
|
||||
|
||||
return bool(
|
||||
has_agent_notification
|
||||
or has_alert_template_notification
|
||||
or has_webhook(alert_template, "agent")
|
||||
or has_script_actions(alert_template, "agent")
|
||||
)
|
||||
|
||||
def send_outage_email(self) -> None:
|
||||
@@ -999,6 +1064,9 @@ class AgentCustomField(models.Model):
|
||||
default=list,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = (("agent", "field"),)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.field.name
|
||||
|
||||
@@ -1031,6 +1099,7 @@ class AgentCustomField(models.Model):
|
||||
class AgentHistory(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="history",
|
||||
@@ -1053,6 +1122,15 @@ class AgentHistory(models.Model):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_results = models.JSONField(null=True, blank=True)
|
||||
custom_field = models.ForeignKey(
|
||||
"core.CustomField",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="history",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
collector_all_output = models.BooleanField(default=False)
|
||||
save_to_agent_note = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.agent.hostname} - {self.type}"
|
||||
|
||||
@@ -47,13 +47,6 @@ class UpdateAgentPerms(permissions.BasePermission):
|
||||
return _has_perm(r, "can_update_agents")
|
||||
|
||||
|
||||
class PingAgentPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return _has_perm(r, "can_ping_agents") and _has_perm_on_agent(
|
||||
r.user, view.kwargs["agent_id"]
|
||||
)
|
||||
|
||||
|
||||
class ManageProcPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
return _has_perm(r, "can_manage_procs") and _has_perm_on_agent(
|
||||
@@ -122,3 +115,13 @@ class AgentHistoryPerms(permissions.BasePermission):
|
||||
)
|
||||
|
||||
return _has_perm(r, "can_list_agent_history")
|
||||
|
||||
|
||||
class AgentWOLPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view) -> bool:
|
||||
if "agent_id" in view.kwargs.keys():
|
||||
return _has_perm(r, "can_send_wol") and _has_perm_on_agent(
|
||||
r.user, view.kwargs["agent_id"]
|
||||
)
|
||||
|
||||
return _has_perm(r, "can_send_wol")
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from tacticalrmm.constants import AGENT_STATUS_ONLINE
|
||||
from tacticalrmm.constants import AGENT_STATUS_ONLINE, ALL_TIMEZONES
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent, AgentCustomField, AgentHistory, Note
|
||||
@@ -71,7 +70,7 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
return policies
|
||||
|
||||
def get_all_timezones(self, obj):
|
||||
return pytz.all_timezones
|
||||
return ALL_TIMEZONES
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
@@ -95,6 +94,8 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
local_ips = serializers.ReadOnlyField()
|
||||
make_model = serializers.ReadOnlyField()
|
||||
physical_disks = serializers.ReadOnlyField()
|
||||
serial_number = serializers.ReadOnlyField()
|
||||
custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
|
||||
|
||||
def get_alert_template(self, obj):
|
||||
if not obj.alert_template:
|
||||
@@ -153,6 +154,8 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
"local_ips",
|
||||
"make_model",
|
||||
"physical_disks",
|
||||
"custom_fields",
|
||||
"serial_number",
|
||||
]
|
||||
depth = 2
|
||||
|
||||
|
||||
@@ -175,7 +175,7 @@ def run_script_email_results_task(
|
||||
return
|
||||
|
||||
CORE = get_core_settings()
|
||||
subject = f"{agent.hostname} {script.name} Results"
|
||||
subject = f"{agent.client.name}, {agent.site.name}, {agent.hostname} {script.name} Results"
|
||||
exec_time = "{:.4f}".format(r["execution_time"])
|
||||
body = (
|
||||
subject
|
||||
|
||||
61  api/tacticalrmm/agents/tests/test_agent_save.py  Normal file
@@ -0,0 +1,61 @@
from unittest.mock import patch

from model_bakery import baker

from agents.models import Agent
from tacticalrmm.constants import AgentMonType
from tacticalrmm.test import TacticalTestCase


class AgentSaveTestCase(TacticalTestCase):
    def setUp(self):
        self.client1 = baker.make("clients.Client")
        self.client2 = baker.make("clients.Client")
        self.site1 = baker.make("clients.Site", client=self.client1)
        self.site2 = baker.make("clients.Site", client=self.client2)
        self.site3 = baker.make("clients.Site", client=self.client2)
        self.agent = baker.make(
            "agents.Agent",
            site=self.site1,
            monitoring_type=AgentMonType.SERVER,
        )

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_called_on_mon_type_change(
        self, mock_set_alert_template
    ):
        self.agent.monitoring_type = AgentMonType.WORKSTATION
        self.agent.save()
        mock_set_alert_template.assert_called_once()

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_called_on_site_change(self, mock_set_alert_template):
        self.agent.site = self.site2
        self.agent.save()
        mock_set_alert_template.assert_called_once()

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_called_on_site_and_montype_change(
        self, mock_set_alert_template
    ):
        print(f"before: {self.agent.monitoring_type} site: {self.agent.site_id}")
        self.agent.site = self.site3
        self.agent.monitoring_type = AgentMonType.WORKSTATION
        self.agent.save()
        mock_set_alert_template.assert_called_once()
        print(f"after: {self.agent.monitoring_type} site: {self.agent.site_id}")

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_not_called_without_changes(
        self, mock_set_alert_template
    ):
        self.agent.save()
        mock_set_alert_template.assert_not_called()

    @patch.object(Agent, "set_alert_template")
    def test_set_alert_template_not_called_on_non_relevant_field_change(
        self, mock_set_alert_template
    ):
        self.agent.hostname = "abc123"
        self.agent.save()
        mock_set_alert_template.assert_not_called()
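These tests only pass if Agent.save() tracks which fields changed and re-caches the alert template when the agent's site or monitoring type moves. The override itself is not part of this excerpt; below is a minimal sketch of one way that detection could work, with the field list taken from the tests above and everything else illustrative.

from django.db import models


class Agent(models.Model):
    # sketch only: real fields and behavior omitted

    def save(self, *args, **kwargs):
        # Compare against the previously persisted row, if any.
        old = type(self).objects.filter(pk=self.pk).first() if self.pk else None
        super().save(*args, **kwargs)

        # Only re-evaluate the alert template when fields that influence it change.
        if old and (
            old.site_id != self.site_id
            or old.monitoring_type != self.monitoring_type
        ):
            self.set_alert_template()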
@@ -2,9 +2,9 @@ import json
import os
from itertools import cycle
from typing import TYPE_CHECKING
-from unittest.mock import patch
+from unittest.mock import PropertyMock, patch
+from zoneinfo import ZoneInfo

-import pytz
from django.conf import settings
from django.utils import timezone as djangotime
from model_bakery import baker
@@ -573,12 +573,14 @@ class TestAgentViews(TacticalTestCase):
|
||||
}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
|
||||
email_task.assert_called_with(
|
||||
agentpk=self.agent.pk,
|
||||
scriptpk=script.pk,
|
||||
nats_timeout=18,
|
||||
emails=[],
|
||||
args=["abc", "123"],
|
||||
history_pk=hist.pk,
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
@@ -588,12 +590,14 @@ class TestAgentViews(TacticalTestCase):
|
||||
data["emailMode"] = "custom"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
|
||||
email_task.assert_called_with(
|
||||
agentpk=self.agent.pk,
|
||||
scriptpk=script.pk,
|
||||
nats_timeout=18,
|
||||
emails=["admin@example.com", "bob@example.com"],
|
||||
args=["abc", "123"],
|
||||
history_pk=hist.pk,
|
||||
run_as_user=False,
|
||||
env_vars=["hello=world", "foo=bar"],
|
||||
)
|
||||
@@ -764,6 +768,67 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.assertEqual(Note.objects.get(agent=self.agent).note, "ok")
|
||||
|
||||
# test run on server
|
||||
with patch("core.utils.run_server_script") as mock_run_server_script:
|
||||
mock_run_server_script.return_value = ("output", "error", 1.23456789, 0)
|
||||
data = {
|
||||
"script": script.pk,
|
||||
"output": "wait",
|
||||
"args": ["arg1", "arg2"],
|
||||
"timeout": 15,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["key1=val1", "key2=val2"],
|
||||
"run_on_server": True,
|
||||
}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
|
||||
if not hist:
|
||||
raise AgentHistory.DoesNotExist
|
||||
|
||||
mock_run_server_script.assert_called_with(
|
||||
body=script.script_body,
|
||||
args=script.parse_script_args(self.agent, script.shell, data["args"]),
|
||||
env_vars=script.parse_script_env_vars(
|
||||
self.agent, script.shell, data["env_vars"]
|
||||
),
|
||||
shell=script.shell,
|
||||
timeout=18,
|
||||
)
|
||||
|
||||
expected_ret = {
|
||||
"stdout": "output",
|
||||
"stderr": "error",
|
||||
"execution_time": "1.2346",
|
||||
"retcode": 0,
|
||||
}
|
||||
|
||||
self.assertEqual(r.data, expected_ret)
|
||||
|
||||
hist.refresh_from_db()
|
||||
expected_script_results = {**expected_ret, "id": hist.pk}
|
||||
self.assertEqual(hist.script_results, expected_script_results)
|
||||
|
||||
# test run on server with server scripts disabled
|
||||
with patch(
|
||||
"core.models.CoreSettings.server_scripts_enabled",
|
||||
new_callable=PropertyMock,
|
||||
) as server_scripts_enabled:
|
||||
server_scripts_enabled.return_value = False
|
||||
|
||||
data = {
|
||||
"script": script.pk,
|
||||
"output": "wait",
|
||||
"args": ["arg1", "arg2"],
|
||||
"timeout": 15,
|
||||
"run_as_user": False,
|
||||
"env_vars": ["key1=val1", "key2=val2"],
|
||||
"run_on_server": True,
|
||||
}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_get_notes(self):
|
||||
url = f"{base_url}/notes/"
|
||||
|
||||
@@ -862,7 +927,7 @@ class TestAgentViews(TacticalTestCase):

        # test pulling data
        r = self.client.get(url, format="json")
-        ctx = {"default_tz": pytz.timezone("America/Los_Angeles")}
+        ctx = {"default_tz": ZoneInfo("America/Los_Angeles")}
        data = AgentHistorySerializer(history, many=True, context=ctx).data
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data, data)  # type:ignore
@@ -1016,7 +1081,6 @@ class TestAgentPermissions(TacticalTestCase):
|
||||
{"method": "post", "action": "recover", "role": "can_recover_agents"},
|
||||
{"method": "post", "action": "reboot", "role": "can_reboot_agents"},
|
||||
{"method": "patch", "action": "reboot", "role": "can_reboot_agents"},
|
||||
{"method": "get", "action": "ping", "role": "can_ping_agents"},
|
||||
{"method": "get", "action": "meshcentral", "role": "can_use_mesh"},
|
||||
{"method": "post", "action": "meshcentral/recover", "role": "can_use_mesh"},
|
||||
{"method": "get", "action": "processes", "role": "can_manage_procs"},
|
||||
|
||||
@@ -15,6 +15,7 @@ urlpatterns = [
|
||||
path("<agent:agent_id>/wmi/", views.WMI.as_view()),
|
||||
path("<agent:agent_id>/recover/", views.recover),
|
||||
path("<agent:agent_id>/reboot/", views.Reboot.as_view()),
|
||||
path("<agent:agent_id>/shutdown/", views.Shutdown.as_view()),
|
||||
path("<agent:agent_id>/ping/", views.ping),
|
||||
# alias for checks get view
|
||||
path("<agent:agent_id>/checks/", GetAddChecks.as_view()),
|
||||
@@ -43,4 +44,5 @@ urlpatterns = [
|
||||
path("installer/", views.install_agent),
|
||||
path("bulkrecovery/", views.bulk_agent_recovery),
|
||||
path("scripthistory/", views.ScriptRunHistory.as_view()),
|
||||
path("<agent:agent_id>/wol/", views.wol),
|
||||
]
|
||||
|
||||
@@ -6,19 +6,12 @@ import time
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
|
||||
from core.utils import (
|
||||
get_core_settings,
|
||||
get_mesh_ws_url,
|
||||
remove_mesh_agent,
|
||||
token_is_valid,
|
||||
)
|
||||
from django.conf import settings
|
||||
from django.db.models import Exists, OuterRef, Prefetch, Q
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from logs.models import AuditLog, DebugLog, PendingAction
|
||||
from meshctrl.utils import get_login_token
|
||||
from packaging import version as pyver
|
||||
from rest_framework import serializers
|
||||
@@ -27,8 +20,18 @@ from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from core.utils import (
|
||||
get_core_settings,
|
||||
get_mesh_ws_url,
|
||||
remove_mesh_agent,
|
||||
token_is_valid,
|
||||
wake_on_lan,
|
||||
)
|
||||
from logs.models import AuditLog, DebugLog, PendingAction
|
||||
from scripts.models import Script
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
from scripts.tasks import bulk_command_task, bulk_script_task
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_DEFER,
|
||||
AGENT_STATUS_OFFLINE,
|
||||
@@ -49,7 +52,7 @@ from tacticalrmm.permissions import (
|
||||
_has_perm_on_site,
|
||||
)
|
||||
from tacticalrmm.utils import get_default_timezone, reload_nats
|
||||
from winupdate.models import WinUpdate
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||
|
||||
@@ -58,11 +61,11 @@ from .permissions import (
|
||||
AgentHistoryPerms,
|
||||
AgentNotesPerms,
|
||||
AgentPerms,
|
||||
AgentWOLPerms,
|
||||
EvtLogPerms,
|
||||
InstallAgentPerms,
|
||||
ManageProcPerms,
|
||||
MeshPerms,
|
||||
PingAgentPerms,
|
||||
RebootAgentPerms,
|
||||
RecoverAgentPerms,
|
||||
RunBulkPerms,
|
||||
@@ -134,6 +137,10 @@ class GetAgents(APIView):
|
||||
"checkresults",
|
||||
queryset=CheckResult.objects.select_related("assigned_check"),
|
||||
),
|
||||
Prefetch(
|
||||
"custom_fields",
|
||||
queryset=AgentCustomField.objects.select_related("field"),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
has_patches_pending=Exists(
|
||||
@@ -183,7 +190,36 @@ class GetUpdateDeleteAgent(APIView):
|
||||
|
||||
# get agent details
|
||||
def get(self, request, agent_id):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
from checks.models import Check, CheckResult
|
||||
|
||||
agent = get_object_or_404(
|
||||
Agent.objects.select_related(
|
||||
"site__server_policy",
|
||||
"site__workstation_policy",
|
||||
"site__client__server_policy",
|
||||
"site__client__workstation_policy",
|
||||
"policy",
|
||||
"alert_template",
|
||||
).prefetch_related(
|
||||
Prefetch(
|
||||
"agentchecks",
|
||||
queryset=Check.objects.select_related("script"),
|
||||
),
|
||||
Prefetch(
|
||||
"checkresults",
|
||||
queryset=CheckResult.objects.select_related("assigned_check"),
|
||||
),
|
||||
Prefetch(
|
||||
"custom_fields",
|
||||
queryset=AgentCustomField.objects.select_related("field"),
|
||||
),
|
||||
Prefetch(
|
||||
"winupdatepolicy",
|
||||
queryset=WinUpdatePolicy.objects.select_related("agent", "policy"),
|
||||
),
|
||||
),
|
||||
agent_id=agent_id,
|
||||
)
|
||||
return Response(AgentSerializer(agent).data)
|
||||
|
||||
# edit agent
|
||||
@@ -223,6 +259,7 @@ class GetUpdateDeleteAgent(APIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response("The agent was updated successfully")
|
||||
|
||||
# uninstall agent
|
||||
@@ -248,6 +285,7 @@ class GetUpdateDeleteAgent(APIView):
|
||||
message=f"Unable to remove agent {name} from meshcentral database: {e}",
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
)
|
||||
sync_mesh_perms_task.delay()
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
|
||||
|
||||
@@ -290,13 +328,13 @@ class AgentMeshCentral(APIView):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
core = get_core_settings()
|
||||
|
||||
if not core.mesh_disable_auto_login:
|
||||
token = get_login_token(
|
||||
key=core.mesh_token, user=f"user//{core.mesh_username}"
|
||||
)
|
||||
token_param = f"login={token}&"
|
||||
else:
|
||||
token_param = ""
|
||||
user = (
|
||||
request.user.mesh_user_id
|
||||
if core.sync_mesh_with_trmm
|
||||
else f"user//{core.mesh_api_superuser}"
|
||||
)
|
||||
token = get_login_token(key=core.mesh_token, user=user)
|
||||
token_param = f"login={token}&"
|
||||
|
||||
control = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
terminal = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
@@ -366,7 +404,7 @@ def update_agents(request):
|
||||
|
||||
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAuthenticated, PingAgentPerms])
|
||||
@permission_classes([IsAuthenticated, AgentPerms])
|
||||
def ping(request, agent_id):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
status = AGENT_STATUS_OFFLINE
|
||||
@@ -456,6 +494,19 @@ def send_raw_cmd(request, agent_id):
|
||||
return Response(r)
|
||||
|
||||
|
||||
class Shutdown(APIView):
|
||||
permission_classes = [IsAuthenticated, RebootAgentPerms]
|
||||
|
||||
# shutdown
|
||||
def post(self, request, agent_id):
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
r = asyncio.run(agent.nats_cmd({"func": "shutdown"}, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Reboot(APIView):
|
||||
permission_classes = [IsAuthenticated, RebootAgentPerms]
|
||||
|
||||
@@ -528,10 +579,18 @@ class Reboot(APIView):
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAuthenticated, InstallAgentPerms])
|
||||
def install_agent(request):
|
||||
from knox.models import AuthToken
|
||||
|
||||
from accounts.models import User
|
||||
from agents.utils import get_agent_url
|
||||
from core.utils import token_is_valid
|
||||
from knox.models import AuthToken
|
||||
|
||||
insecure = getattr(settings, "TRMM_INSECURE", False)
|
||||
|
||||
if insecure and request.data["installMethod"] in {"exe", "powershell"}:
|
||||
return notify_error(
|
||||
"Not available in insecure mode. Please use the 'Manual' method."
|
||||
)
|
||||
|
||||
# TODO rework this ghetto validation hack
|
||||
# https://github.com/amidaware/tacticalrmm/issues/1461
|
||||
@@ -635,6 +694,9 @@ def install_agent(request):
|
||||
if int(request.data["power"]):
|
||||
cmd.append("--power")
|
||||
|
||||
if insecure:
|
||||
cmd.append("--insecure")
|
||||
|
||||
resp["cmd"] = " ".join(str(i) for i in cmd)
|
||||
else:
|
||||
install_flags.insert(0, f"sudo ./{inno}")
|
||||
@@ -643,6 +705,8 @@ def install_agent(request):
|
||||
resp["cmd"] = (
|
||||
dl + f" && chmod +x {inno} && " + " ".join(str(i) for i in cmd)
|
||||
)
|
||||
if insecure:
|
||||
resp["cmd"] += " --insecure"
|
||||
|
||||
resp["url"] = download_url
|
||||
|
||||
@@ -704,6 +768,10 @@ def run_script(request, agent_id):
    run_as_user: bool = request.data["run_as_user"]
    env_vars: list[str] = request.data["env_vars"]
    req_timeout = int(request.data["timeout"]) + 3
    run_on_server: bool | None = request.data.get("run_on_server")

    if run_on_server and not get_core_settings().server_scripts_enabled:
        return notify_error("This feature is disabled.")

    AuditLog.audit_script_run(
        username=request.user.username,
@@ -720,6 +788,29 @@
    )
    history_pk = hist.pk

    if run_on_server:
        from core.utils import run_server_script

        r = run_server_script(
            body=script.script_body,
            args=script.parse_script_args(agent, script.shell, args),
            env_vars=script.parse_script_env_vars(agent, script.shell, env_vars),
            shell=script.shell,
            timeout=req_timeout,
        )

        ret = {
            "stdout": r[0],
            "stderr": r[1],
            "execution_time": "{:.4f}".format(r[2]),
            "retcode": r[3],
        }

        hist.script_results = {**ret, "id": history_pk}
        hist.save(update_fields=["script_results"])

        return Response(ret)
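The view above unpacks a (stdout, stderr, execution_time, retcode) tuple from core.utils.run_server_script, whose implementation is not included in this excerpt. A rough sketch of what such a helper could look like, assuming a subprocess-based runner and a temp-file approach; the interpreter choice and the timeout return code are placeholders, not the project's actual values.

import os
import subprocess
import tempfile
import time


def run_server_script(
    body: str, args: list[str], env_vars: list[str], shell: str, timeout: int
) -> tuple[str, str, float, int]:
    # Merge KEY=VALUE pairs into the environment (assumes each entry contains "=").
    env = {**os.environ, **dict(v.split("=", 1) for v in env_vars)}

    # Write the script body to a temp file so an interpreter can execute it.
    with tempfile.NamedTemporaryFile("w", suffix=".sh", delete=False) as f:
        f.write(body)
        path = f.name

    start = time.monotonic()
    try:
        proc = subprocess.run(
            ["/bin/bash", path, *args],  # placeholder: real code would map `shell` to an interpreter
            capture_output=True,
            text=True,
            env=env,
            timeout=timeout,
        )
        stdout, stderr, retcode = proc.stdout, proc.stderr, proc.returncode
    except subprocess.TimeoutExpired:
        stdout, stderr, retcode = "", "Script timed out", 98  # placeholder timeout code
    finally:
        os.unlink(path)

    return stdout, stderr, time.monotonic() - start, retcode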
|
||||
|
||||
if output == "wait":
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk,
|
||||
@@ -742,6 +833,7 @@ def run_script(request, agent_id):
|
||||
nats_timeout=req_timeout,
|
||||
emails=emails,
|
||||
args=args,
|
||||
history_pk=history_pk,
|
||||
run_as_user=run_as_user,
|
||||
env_vars=env_vars,
|
||||
)
|
||||
@@ -913,7 +1005,7 @@ def bulk(request):
    agents: list[int] = [agent.pk for agent in q]

    if not agents:
-        return notify_error("No agents where found meeting the selected criteria")
+        return notify_error("No agents were found meeting the selected criteria")
|
||||
|
||||
AuditLog.audit_bulk_action(
|
||||
request.user,
|
||||
@@ -922,34 +1014,51 @@ def bulk(request):
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
ht = "Check the History tab on the agent to view the results."
|
||||
|
||||
if request.data["mode"] == "command":
|
||||
if request.data["shell"] == "custom" and request.data["custom_shell"]:
|
||||
shell = request.data["custom_shell"]
|
||||
else:
|
||||
shell = request.data["shell"]
|
||||
|
||||
handle_bulk_command_task.delay(
|
||||
agents,
|
||||
request.data["cmd"],
|
||||
shell,
|
||||
request.data["timeout"],
|
||||
request.user.username[:50],
|
||||
request.data["run_as_user"],
|
||||
bulk_command_task.delay(
|
||||
agent_pks=agents,
|
||||
cmd=request.data["cmd"],
|
||||
shell=shell,
|
||||
timeout=request.data["timeout"],
|
||||
username=request.user.username[:50],
|
||||
run_as_user=request.data["run_as_user"],
|
||||
)
|
||||
return Response(f"Command will now be run on {len(agents)} agents")
|
||||
return Response(f"Command will now be run on {len(agents)} agents. {ht}")
|
||||
|
||||
elif request.data["mode"] == "script":
|
||||
script = get_object_or_404(Script, pk=request.data["script"])
|
||||
handle_bulk_script_task.delay(
|
||||
script.pk,
|
||||
agents,
|
||||
request.data["args"],
|
||||
request.data["timeout"],
|
||||
request.user.username[:50],
|
||||
request.data["run_as_user"],
|
||||
request.data["env_vars"],
|
||||
|
||||
# prevent API from breaking for those who haven't updated payload
|
||||
try:
|
||||
custom_field_pk = request.data["custom_field"]
|
||||
collector_all_output = request.data["collector_all_output"]
|
||||
save_to_agent_note = request.data["save_to_agent_note"]
|
||||
except KeyError:
|
||||
custom_field_pk = None
|
||||
collector_all_output = False
|
||||
save_to_agent_note = False
|
||||
|
||||
bulk_script_task.delay(
|
||||
script_pk=script.pk,
|
||||
agent_pks=agents,
|
||||
args=request.data["args"],
|
||||
timeout=request.data["timeout"],
|
||||
username=request.user.username[:50],
|
||||
run_as_user=request.data["run_as_user"],
|
||||
env_vars=request.data["env_vars"],
|
||||
custom_field_pk=custom_field_pk,
|
||||
collector_all_output=collector_all_output,
|
||||
save_to_agent_note=save_to_agent_note,
|
||||
)
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents. {ht}")
|
||||
|
||||
elif request.data["mode"] == "patch":
|
||||
if request.data["patchMode"] == "install":
|
||||
@@ -1123,3 +1232,18 @@ class ScriptRunHistory(APIView):
|
||||
|
||||
ret = self.OutputSerializer(hists, many=True).data
|
||||
return Response(ret)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
@permission_classes([IsAuthenticated, AgentWOLPerms])
|
||||
def wol(request, agent_id):
|
||||
agent = get_object_or_404(
|
||||
Agent.objects.defer(*AGENT_DEFER),
|
||||
agent_id=agent_id,
|
||||
)
|
||||
try:
|
||||
uri = get_mesh_ws_url()
|
||||
asyncio.run(wake_on_lan(uri=uri, mesh_node_id=agent.mesh_node_id))
|
||||
except Exception as e:
|
||||
return notify_error(str(e))
|
||||
return Response(f"Wake-on-LAN sent to {agent.hostname}")
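A quick way to exercise the new endpoint, in the style of the surrounding agent tests; the /agents URL prefix, the recipe name, and the patch targets are assumptions based on the rest of this diff, not confirmed by it.

from unittest.mock import AsyncMock, patch

from model_bakery import baker

from tacticalrmm.test import TacticalTestCase


class TestWOL(TacticalTestCase):
    def setUp(self):
        self.authenticate()
        self.setup_coresettings()

    @patch("agents.views.wake_on_lan", new_callable=AsyncMock)
    @patch("agents.views.get_mesh_ws_url", return_value="ws://mesh.example.local")
    def test_wol(self, mock_ws_url, mock_wol):
        agent = baker.make_recipe("agents.agent")
        r = self.client.post(f"/agents/{agent.agent_id}/wol/")
        self.assertEqual(r.status_code, 200)
        mock_wol.assert_awaited_once()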
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 20:21
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0045_coresettings_enable_server_scripts_and_more"),
|
||||
("alerts", "0013_alerttemplate_action_env_vars_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="action_rest",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="url_action_alert_template",
|
||||
to="core.urlaction",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="action_type",
|
||||
field=models.CharField(
|
||||
choices=[("script", "Script"), ("server", "Server"), ("rest", "Rest")],
|
||||
default="script",
|
||||
max_length=10,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="resolved_action_rest",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="resolved_url_action_alert_template",
|
||||
to="core.urlaction",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="alerttemplate",
|
||||
name="resolved_action_type",
|
||||
field=models.CharField(
|
||||
choices=[("script", "Script"), ("server", "Server"), ("rest", "Rest")],
|
||||
default="script",
|
||||
max_length=10,
|
||||
),
|
||||
),
|
||||
]
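Not shown in this excerpt is the AlertTemplateActionType constant that the model, serializer, and permission changes reference. Given the migration's choices above, it is presumably a TextChoices along these lines (a sketch, not the project's verbatim definition):

from django.db import models


class AlertTemplateActionType(models.TextChoices):
    SCRIPT = "script", "Script"
    SERVER = "server", "Server"
    REST = "rest", "Rest"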
|
||||
@@ -1,6 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
@@ -8,16 +7,20 @@ from django.db import models
|
||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from core.utils import run_server_script, run_url_rest_action
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from tacticalrmm.constants import (
|
||||
AgentHistoryType,
|
||||
AgentMonType,
|
||||
AlertSeverity,
|
||||
AlertTemplateActionType,
|
||||
AlertType,
|
||||
CheckType,
|
||||
DebugLogType,
|
||||
)
|
||||
from tacticalrmm.logger import logger
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
from tacticalrmm.utils import RE_DB_VALUE, get_db_value
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
@@ -95,6 +98,15 @@ class Alert(models.Model):
|
||||
def client(self) -> "Client":
|
||||
return self.agent.client
|
||||
|
||||
@property
|
||||
def get_result(self):
|
||||
if self.alert_type == AlertType.CHECK:
|
||||
return self.assigned_check.checkresults.get(agent=self.agent)
|
||||
elif self.alert_type == AlertType.TASK:
|
||||
return self.assigned_task.taskresults.get(agent=self.agent)
|
||||
|
||||
return None
|
||||
|
||||
def resolve(self) -> None:
|
||||
self.resolved = True
|
||||
self.resolved_on = djangotime.now()
|
||||
@@ -106,6 +118,9 @@ class Alert(models.Model):
|
||||
def create_or_return_availability_alert(
|
||||
cls, agent: Agent, skip_create: bool = False
|
||||
) -> Optional[Alert]:
|
||||
if agent.maintenance_mode:
|
||||
return None
|
||||
|
||||
if not cls.objects.filter(
|
||||
agent=agent, alert_type=AlertType.AVAILABILITY, resolved=False
|
||||
).exists():
|
||||
@@ -118,7 +133,7 @@ class Alert(models.Model):
|
||||
agent=agent,
|
||||
alert_type=AlertType.AVAILABILITY,
|
||||
severity=AlertSeverity.ERROR,
|
||||
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
||||
message=f"{agent.hostname} in {agent.client.name}, {agent.site.name} is overdue.",
|
||||
hidden=True,
|
||||
),
|
||||
)
|
||||
@@ -154,6 +169,9 @@ class Alert(models.Model):
|
||||
alert_severity: Optional[str] = None,
|
||||
skip_create: bool = False,
|
||||
) -> "Optional[Alert]":
|
||||
if agent.maintenance_mode:
|
||||
return None
|
||||
|
||||
# need to pass agent if the check is a policy
|
||||
if not cls.objects.filter(
|
||||
assigned_check=check,
|
||||
@@ -169,15 +187,17 @@ class Alert(models.Model):
|
||||
assigned_check=check,
|
||||
agent=agent,
|
||||
alert_type=AlertType.CHECK,
|
||||
severity=check.alert_severity
|
||||
if check.check_type
|
||||
not in {
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
}
|
||||
else alert_severity,
|
||||
severity=(
|
||||
check.alert_severity
|
||||
if check.check_type
|
||||
not in {
|
||||
CheckType.MEMORY,
|
||||
CheckType.CPU_LOAD,
|
||||
CheckType.DISK_SPACE,
|
||||
CheckType.SCRIPT,
|
||||
}
|
||||
else alert_severity
|
||||
),
|
||||
message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||
hidden=True,
|
||||
),
|
||||
@@ -216,6 +236,9 @@ class Alert(models.Model):
|
||||
agent: "Agent",
|
||||
skip_create: bool = False,
|
||||
) -> "Optional[Alert]":
|
||||
if agent.maintenance_mode:
|
||||
return None
|
||||
|
||||
if not cls.objects.filter(
|
||||
assigned_task=task,
|
||||
agent=agent,
|
||||
@@ -270,7 +293,9 @@ class Alert(models.Model):
|
||||
from agents.models import Agent, AgentHistory
|
||||
from autotasks.models import TaskResult
|
||||
from checks.models import CheckResult
|
||||
from core.models import CoreSettings
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
# set variables
|
||||
dashboard_severities = None
|
||||
email_severities = None
|
||||
@@ -281,7 +306,7 @@ class Alert(models.Model):
|
||||
alert_interval = None
|
||||
email_task = None
|
||||
text_task = None
|
||||
run_script_action = None
|
||||
should_run_script_or_webhook = False
|
||||
|
||||
# check what the instance passed is
|
||||
if isinstance(instance, Agent):
|
||||
@@ -307,7 +332,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.agent_always_email
|
||||
always_text = alert_template.agent_always_text
|
||||
alert_interval = alert_template.agent_periodic_alert_days
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
should_run_script_or_webhook = alert_template.agent_script_actions
|
||||
|
||||
elif isinstance(instance, CheckResult):
|
||||
from checks.tasks import (
|
||||
@@ -358,7 +383,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
alert_interval = alert_template.check_periodic_alert_days
|
||||
run_script_action = alert_template.check_script_actions
|
||||
should_run_script_or_webhook = alert_template.check_script_actions
|
||||
|
||||
elif isinstance(instance, TaskResult):
|
||||
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
||||
@@ -392,7 +417,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
alert_interval = alert_template.task_periodic_alert_days
|
||||
run_script_action = alert_template.task_script_actions
|
||||
should_run_script_or_webhook = alert_template.task_script_actions
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -420,12 +445,23 @@ class Alert(models.Model):
|
||||
alert.hidden = False
|
||||
alert.save(update_fields=["hidden"])
|
||||
|
||||
# TODO rework this
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
email_alert = False
|
||||
always_email = False
|
||||
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
email_alert = False
|
||||
always_email = False
|
||||
|
||||
# send email if enabled
|
||||
if email_alert or always_email:
|
||||
# check if alert template is set and specific severities are configured
|
||||
if (
|
||||
not alert_template
|
||||
or alert_template
|
||||
if not alert_template or (
|
||||
alert_template
|
||||
and email_severities
|
||||
and alert.severity in email_severities
|
||||
):
|
||||
@@ -434,41 +470,89 @@ class Alert(models.Model):
|
||||
alert_interval=alert_interval,
|
||||
)
|
||||
|
||||
# TODO rework this
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
text_alert = False
|
||||
always_text = False
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
text_alert = False
|
||||
always_text = False
|
||||
|
||||
# send text if enabled
|
||||
if text_alert or always_text:
|
||||
# check if alert template is set and specific severities are configured
|
||||
if (
|
||||
not alert_template
|
||||
or alert_template
|
||||
and text_severities
|
||||
and alert.severity in text_severities
|
||||
if not alert_template or (
|
||||
alert_template and text_severities and alert.severity in text_severities
|
||||
):
|
||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||
|
||||
# check if any scripts should be run
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.action
|
||||
and run_script_action
|
||||
and not alert.action_run
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-failure",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.action_env_vars,
|
||||
)
|
||||
# check if any scripts/webhooks should be run
|
||||
if alert_template and not alert.action_run and should_run_script_or_webhook:
|
||||
if (
|
||||
alert_template.action_type == AlertTemplateActionType.SCRIPT
|
||||
and alert_template.action
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-failure",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert.parse_script_args(alert_template.action_env_vars),
|
||||
)
|
||||
elif (
|
||||
alert_template.action_type == AlertTemplateActionType.SERVER
|
||||
and alert_template.action
|
||||
):
|
||||
stdout, stderr, execution_time, retcode = run_server_script(
|
||||
body=alert_template.action.script_body,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
timeout=alert_template.action_timeout,
|
||||
env_vars=alert.parse_script_args(alert_template.action_env_vars),
|
||||
shell=alert_template.action.shell,
|
||||
)
|
||||
|
||||
r = {
|
||||
"retcode": retcode,
|
||||
"stdout": stdout,
|
||||
"stderr": stderr,
|
||||
"execution_time": execution_time,
|
||||
}
|
||||
|
||||
elif alert_template.action_type == AlertTemplateActionType.REST:
|
||||
if (
|
||||
alert.severity == AlertSeverity.INFO
|
||||
and not core.notify_on_info_alerts
|
||||
or alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
return
|
||||
else:
|
||||
output, status = run_url_rest_action(
|
||||
action_id=alert_template.action_rest.id, instance=alert
|
||||
)
|
||||
logger.debug(f"{output=} {status=}")
|
||||
|
||||
r = {
|
||||
"stdout": output,
|
||||
"stderr": "",
|
||||
"execution_time": 0,
|
||||
"retcode": status,
|
||||
}
|
||||
else:
|
||||
return
|
||||
|
||||
# command was successful
|
||||
if isinstance(r, dict):
|
||||
@@ -479,11 +563,17 @@ class Alert(models.Model):
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
if alert_template.action_type == AlertTemplateActionType.SCRIPT:
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
else:
|
||||
DebugLog.error(
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on server for failure alert",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def handle_alert_resolve(
|
||||
@@ -492,13 +582,18 @@ class Alert(models.Model):
|
||||
from agents.models import Agent, AgentHistory
|
||||
from autotasks.models import TaskResult
|
||||
from checks.models import CheckResult
|
||||
from core.models import CoreSettings
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
# set variables
|
||||
email_severities = None
|
||||
text_severities = None
|
||||
email_on_resolved = False
|
||||
text_on_resolved = False
|
||||
resolved_email_task = None
|
||||
resolved_text_task = None
|
||||
run_script_action = None
|
||||
should_run_script_or_webhook = False
|
||||
|
||||
# check what the instance passed is
|
||||
if isinstance(instance, Agent):
|
||||
@@ -514,7 +609,9 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.agent_email_on_resolved
|
||||
text_on_resolved = alert_template.agent_text_on_resolved
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
should_run_script_or_webhook = alert_template.agent_script_actions
|
||||
email_severities = [AlertSeverity.ERROR]
|
||||
text_severities = [AlertSeverity.ERROR]
|
||||
|
||||
if agent.overdue_email_alert:
|
||||
email_on_resolved = True
|
||||
@@ -537,7 +634,15 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.check_email_on_resolved
|
||||
text_on_resolved = alert_template.check_text_on_resolved
|
||||
run_script_action = alert_template.check_script_actions
|
||||
should_run_script_or_webhook = alert_template.check_script_actions
|
||||
email_severities = alert_template.check_email_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
text_severities = alert_template.check_text_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
|
||||
elif isinstance(instance, TaskResult):
|
||||
from autotasks.tasks import (
|
||||
@@ -555,7 +660,15 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.task_email_on_resolved
|
||||
text_on_resolved = alert_template.task_text_on_resolved
|
||||
run_script_action = alert_template.task_script_actions
|
||||
should_run_script_or_webhook = alert_template.task_script_actions
|
||||
email_severities = alert_template.task_email_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
text_severities = alert_template.task_text_alert_severity or [
|
||||
AlertSeverity.ERROR,
|
||||
AlertSeverity.WARNING,
|
||||
]
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -570,36 +683,103 @@ class Alert(models.Model):
|
||||
|
||||
# check if a resolved email notification should be send
|
||||
if email_on_resolved and not alert.resolved_email_sent:
|
||||
resolved_email_task.delay(pk=alert.pk)
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
pass
|
||||
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
pass
|
||||
elif email_severities and alert.severity not in email_severities:
|
||||
pass
|
||||
else:
|
||||
resolved_email_task.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved text should be sent
|
||||
if text_on_resolved and not alert.resolved_sms_sent:
|
||||
resolved_text_task.delay(pk=alert.pk)
|
||||
if alert.severity == AlertSeverity.INFO and not core.notify_on_info_alerts:
|
||||
pass
|
||||
|
||||
# check if resolved script should be run
|
||||
elif (
|
||||
alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
pass
|
||||
elif text_severities and alert.severity not in text_severities:
|
||||
pass
|
||||
else:
|
||||
resolved_text_task.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved script/webhook should be run
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and run_script_action
|
||||
and not alert.resolved_action_run
|
||||
and should_run_script_or_webhook
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.action,
|
||||
username="alert-action-resolved",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.resolved_action_env_vars,
|
||||
)
|
||||
if (
|
||||
alert_template.resolved_action_type == AlertTemplateActionType.SCRIPT
|
||||
and alert_template.resolved_action
|
||||
):
|
||||
hist = AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AgentHistoryType.SCRIPT_RUN,
|
||||
script=alert_template.resolved_action,
|
||||
username="alert-action-resolved",
|
||||
)
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
history_pk=hist.pk,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
run_as_user=False,
|
||||
env_vars=alert_template.resolved_action_env_vars,
|
||||
)
|
||||
elif (
|
||||
alert_template.resolved_action_type == AlertTemplateActionType.SERVER
|
||||
and alert_template.resolved_action
|
||||
):
|
||||
stdout, stderr, execution_time, retcode = run_server_script(
|
||||
body=alert_template.resolved_action.script_body,
|
||||
args=alert.parse_script_args(alert_template.resolved_action_args),
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
env_vars=alert.parse_script_args(
|
||||
alert_template.resolved_action_env_vars
|
||||
),
|
||||
shell=alert_template.resolved_action.shell,
|
||||
)
|
||||
r = {
|
||||
"stdout": stdout,
|
||||
"stderr": stderr,
|
||||
"execution_time": execution_time,
|
||||
"retcode": retcode,
|
||||
}
|
||||
|
||||
elif alert_template.action_type == AlertTemplateActionType.REST:
|
||||
if (
|
||||
alert.severity == AlertSeverity.INFO
|
||||
and not core.notify_on_info_alerts
|
||||
or alert.severity == AlertSeverity.WARNING
|
||||
and not core.notify_on_warning_alerts
|
||||
):
|
||||
return
|
||||
else:
|
||||
output, status = run_url_rest_action(
|
||||
action_id=alert_template.resolved_action_rest.id, instance=alert
|
||||
)
|
||||
logger.debug(f"{output=} {status=}")
|
||||
|
||||
r = {
|
||||
"stdout": output,
|
||||
"stderr": "",
|
||||
"execution_time": 0,
|
||||
"retcode": status,
|
||||
}
|
||||
else:
|
||||
return
|
||||
|
||||
# command was successful
|
||||
if isinstance(r, dict):
|
||||
@@ -612,39 +792,36 @@ class Alert(models.Model):
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
if (
|
||||
alert_template.resolved_action_type
|
||||
== AlertTemplateActionType.SCRIPT
|
||||
):
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
else:
|
||||
DebugLog.error(
|
||||
log_type=DebugLogType.SCRIPTING,
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on server for resolved alert",
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: List[str]) -> List[str]:
|
||||
if not args:
|
||||
return []
|
||||
|
||||
temp_args = []
|
||||
# pattern to match for injection
|
||||
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")
|
||||
|
||||
for arg in args:
|
||||
match = pattern.match(arg)
|
||||
if match:
|
||||
name = match.group(1)
|
||||
temp_arg = arg
|
||||
for string, model, prop in RE_DB_VALUE.findall(arg):
|
||||
value = get_db_value(string=f"{model}.{prop}", instance=self)
|
||||
|
||||
# check if attr exists and isn't a function
|
||||
if hasattr(self, name) and not callable(getattr(self, name)):
|
||||
value = f"'{getattr(self, name)}'"
|
||||
else:
|
||||
continue
|
||||
if value is not None:
|
||||
temp_arg = temp_arg.replace(string, f"'{str(value)}'")
|
||||
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
|
||||
except Exception as e:
|
||||
DebugLog.error(log_type=DebugLogType.SCRIPTING, message=str(e))
|
||||
continue
|
||||
|
||||
else:
|
||||
temp_args.append(arg)
|
||||
temp_args.append(temp_arg)
|
||||
|
||||
return temp_args
|
||||
|
||||
@@ -653,6 +830,11 @@ class AlertTemplate(BaseAuditModel):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
action_type = models.CharField(
|
||||
max_length=10,
|
||||
choices=AlertTemplateActionType.choices,
|
||||
default=AlertTemplateActionType.SCRIPT,
|
||||
)
|
||||
action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="alert_template",
|
||||
@@ -660,6 +842,13 @@ class AlertTemplate(BaseAuditModel):
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
action_rest = models.ForeignKey(
|
||||
"core.URLAction",
|
||||
related_name="url_action_alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
@@ -673,6 +862,11 @@ class AlertTemplate(BaseAuditModel):
|
||||
default=list,
|
||||
)
|
||||
action_timeout = models.PositiveIntegerField(default=15)
|
||||
resolved_action_type = models.CharField(
|
||||
max_length=10,
|
||||
choices=AlertTemplateActionType.choices,
|
||||
default=AlertTemplateActionType.SCRIPT,
|
||||
)
|
||||
resolved_action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="resolved_alert_template",
|
||||
@@ -680,6 +874,13 @@ class AlertTemplate(BaseAuditModel):
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
resolved_action_rest = models.ForeignKey(
|
||||
"core.URLAction",
|
||||
related_name="resolved_url_action_alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
resolved_action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
@@ -718,7 +919,8 @@ class AlertTemplate(BaseAuditModel):
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
# fmt: off
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
@@ -742,7 +944,8 @@ class AlertTemplate(BaseAuditModel):
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
# fmt: off
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
@@ -766,7 +969,8 @@ class AlertTemplate(BaseAuditModel):
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
# fmt: off
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True) # should be renamed because also deals with webhooks
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
|
||||
@@ -3,6 +3,7 @@ from typing import TYPE_CHECKING
from django.shortcuts import get_object_or_404
from rest_framework import permissions

+from tacticalrmm.constants import AlertTemplateActionType
from tacticalrmm.permissions import _has_perm, _has_perm_on_agent

if TYPE_CHECKING:
@@ -53,4 +54,17 @@ class AlertTemplatePerms(permissions.BasePermission):
        if r.method == "GET":
            return _has_perm(r, "can_list_alerttemplates")

+        if r.method in ("POST", "PUT", "PATCH"):
+            # ensure only users with explicit run server script perms can add/modify alert templates
+            # while also still requiring the manage alert template perm
+            if isinstance(r.data, dict):
+                if (
+                    r.data.get("action_type") == AlertTemplateActionType.SERVER
+                    or r.data.get("resolved_action_type")
+                    == AlertTemplateActionType.SERVER
+                ):
+                    return _has_perm(r, "can_run_server_scripts") and _has_perm(
+                        r, "can_manage_alerttemplates"
+                    )
+
        return _has_perm(r, "can_manage_alerttemplates")
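As an illustration of the gate above, a write request whose payload selects a server-side action needs both permissions. A hypothetical payload shape (field values are illustrative only):

# Because action_type is "server", the permission class above requires
# can_run_server_scripts in addition to can_manage_alerttemplates.
payload = {
    "name": "Notify on failure",
    "action_type": "server",          # AlertTemplateActionType.SERVER
    "action": 42,                     # pk of the script to run on the server (illustrative)
    "resolved_action_type": "script",
}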
|
||||
|
||||
@@ -3,6 +3,7 @@ from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||
|
||||
from automation.serializers import PolicySerializer
|
||||
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
|
||||
from tacticalrmm.constants import AlertTemplateActionType
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
|
||||
@@ -25,14 +26,29 @@ class AlertTemplateSerializer(ModelSerializer):
|
||||
task_settings = ReadOnlyField(source="has_task_settings")
|
||||
core_settings = ReadOnlyField(source="has_core_settings")
|
||||
default_template = ReadOnlyField(source="is_default_template")
|
||||
action_name = ReadOnlyField(source="action.name")
|
||||
resolved_action_name = ReadOnlyField(source="resolved_action.name")
|
||||
action_name = SerializerMethodField()
|
||||
resolved_action_name = SerializerMethodField()
|
||||
applied_count = SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
def get_action_name(self, obj):
|
||||
if obj.action_type == AlertTemplateActionType.REST and obj.action_rest:
|
||||
return obj.action_rest.name
|
||||
|
||||
return obj.action.name if obj.action else ""
|
||||
|
||||
def get_resolved_action_name(self, obj):
|
||||
if (
|
||||
obj.resolved_action_type == AlertTemplateActionType.REST
|
||||
and obj.resolved_action_rest
|
||||
):
|
||||
return obj.resolved_action_rest.name
|
||||
|
||||
return obj.resolved_action.name if obj.resolved_action else ""
|
||||
|
||||
def get_applied_count(self, instance):
|
||||
return (
|
||||
instance.policies.count()
|
||||
|
||||
@@ -1,16 +1,21 @@
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import timedelta
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import TaskResult
|
||||
from core.tasks import cache_db_fields_task, resolve_alerts_task
|
||||
from core.utils import get_core_settings
|
||||
from tacticalrmm.constants import AgentMonType, AlertSeverity, AlertType, CheckStatus
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.constants import (
|
||||
AgentMonType,
|
||||
AlertSeverity,
|
||||
AlertType,
|
||||
CheckStatus,
|
||||
URLActionType,
|
||||
)
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
@@ -28,6 +33,7 @@ class TestAlertsViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
"""
|
||||
def test_get_alerts(self):
|
||||
url = "/alerts/"
|
||||
|
||||
@@ -39,14 +45,14 @@ class TestAlertsViews(TacticalTestCase):
|
||||
alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
agent=agent,
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
alert_time=seq(djangotime.now(), timedelta(days=15)),
|
||||
severity=AlertSeverity.WARNING,
|
||||
_quantity=3,
|
||||
)
|
||||
baker.make(
|
||||
"alerts.Alert",
|
||||
assigned_check=check,
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
alert_time=seq(djangotime.now(), timedelta(days=15)),
|
||||
severity=AlertSeverity.ERROR,
|
||||
_quantity=7,
|
||||
)
|
||||
@@ -55,7 +61,7 @@ class TestAlertsViews(TacticalTestCase):
|
||||
assigned_task=task,
|
||||
snoozed=True,
|
||||
snooze_until=djangotime.now(),
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
alert_time=seq(djangotime.now(), timedelta(days=15)),
|
||||
_quantity=2,
|
||||
)
|
||||
baker.make(
|
||||
@@ -63,7 +69,7 @@ class TestAlertsViews(TacticalTestCase):
|
||||
agent=agent,
|
||||
resolved=True,
|
||||
resolved_on=djangotime.now(),
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
alert_time=seq(djangotime.now(), timedelta(days=15)),
|
||||
_quantity=9,
|
||||
)
|
||||
|
||||
@@ -120,13 +126,14 @@ class TestAlertsViews(TacticalTestCase):
|
||||
self.assertEqual(len(resp.data), req["count"])
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
"""
|
||||
|
||||
def test_add_alert(self):
|
||||
url = "/alerts/"
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
data = {
|
||||
"alert_time": datetime.now(),
|
||||
"alert_time": djangotime.now(),
|
||||
"agent": agent.id,
|
||||
"severity": "warning",
|
||||
"alert_type": "availability",
|
||||
@@ -275,12 +282,32 @@ class TestAlertsViews(TacticalTestCase):
|
||||
resp = self.client.get("/alerts/templates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
url = f"/alerts/templates/{alert_template.pk}/"
|
||||
agent_script = baker.make("scripts.Script")
|
||||
server_script = baker.make("scripts.Script")
|
||||
webhook = baker.make("core.URLAction", action_type=URLActionType.REST)
|
||||
|
||||
alert_template_agent_script = baker.make(
|
||||
"alerts.AlertTemplate", action=agent_script
|
||||
)
|
||||
url = f"/alerts/templates/{alert_template_agent_script.pk}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template)
|
||||
serializer = AlertTemplateSerializer(alert_template_agent_script)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
alert_template_server_script = baker.make(
|
||||
"alerts.AlertTemplate", action=server_script
|
||||
)
|
||||
url = f"/alerts/templates/{alert_template_server_script.pk}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template_server_script)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
alert_template_webhook = baker.make("alerts.AlertTemplate", action_rest=webhook)
|
||||
url = f"/alerts/templates/{alert_template_webhook.pk}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template_webhook)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
@@ -363,7 +390,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
not_snoozed = baker.make(
|
||||
"alerts.Alert",
|
||||
snoozed=True,
|
||||
snooze_until=seq(datetime.now(), timedelta(days=15)),
|
||||
snooze_until=seq(djangotime.now(), timedelta(days=15)),
|
||||
_quantity=5,
|
||||
)
|
||||
|
||||
@@ -371,7 +398,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
snoozed = baker.make(
|
||||
"alerts.Alert",
|
||||
snoozed=True,
|
||||
snooze_until=seq(datetime.now(), timedelta(days=-15)),
|
||||
snooze_until=seq(djangotime.now(), timedelta(days=-15)),
|
||||
_quantity=5,
|
||||
)
|
||||
|
||||
@@ -1427,6 +1454,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"run_as_user": False,
|
||||
"env_vars": ["hello=world", "foo=bar"],
|
||||
"id": AgentHistory.objects.last().pk, # type: ignore
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
|
||||
nats_cmd.assert_called_with(data, timeout=30, wait=True)
|
||||
@@ -1458,6 +1487,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"run_as_user": False,
|
||||
"env_vars": ["resolved=action", "env=vars"],
|
||||
"id": AgentHistory.objects.last().pk, # type: ignore
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
|
||||
nats_cmd.assert_called_with(data, timeout=35, wait=True)
|
||||
|
||||
@@ -25,12 +25,16 @@ class GetAddAlerts(APIView):
|
||||
def patch(self, request):
|
||||
# top 10 alerts for dashboard icon
|
||||
if "top" in request.data.keys():
|
||||
alerts = Alert.objects.filter(
|
||||
resolved=False, snoozed=False, hidden=False
|
||||
).order_by("alert_time")[: int(request.data["top"])]
|
||||
count = Alert.objects.filter(
|
||||
resolved=False, snoozed=False, hidden=False
|
||||
).count()
|
||||
alerts = (
|
||||
Alert.objects.filter_by_role(request.user) # type: ignore
|
||||
.filter(resolved=False, snoozed=False, hidden=False)
|
||||
.order_by("alert_time")[: int(request.data["top"])]
|
||||
)
|
||||
count = (
|
||||
Alert.objects.filter_by_role(request.user) # type: ignore
|
||||
.filter(resolved=False, snoozed=False, hidden=False)
|
||||
.count()
|
||||
)
|
||||
return Response(
|
||||
{
|
||||
"alerts_count": count,
|
||||
|
||||
@@ -22,4 +22,12 @@ def get_agent_config() -> AgentCheckInConfig:
            *getattr(settings, "CHECKIN_SYNCMESH", (800, 1200))
        ),
        limit_data=getattr(settings, "LIMIT_DATA", False),
        install_nushell=getattr(settings, "INSTALL_NUSHELL", False),
        install_nushell_version=getattr(settings, "INSTALL_NUSHELL_VERSION", ""),
        install_nushell_url=getattr(settings, "INSTALL_NUSHELL_URL", ""),
        nushell_enable_config=getattr(settings, "NUSHELL_ENABLE_CONFIG", False),
        install_deno=getattr(settings, "INSTALL_DENO", False),
        install_deno_version=getattr(settings, "INSTALL_DENO_VERSION", ""),
        install_deno_url=getattr(settings, "INSTALL_DENO_URL", ""),
        deno_default_permissions=getattr(settings, "DENO_DEFAULT_PERMISSIONS", ""),
    )
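Because each option is read with getattr and a safe default, these features stay off until opted into from a settings override. A hedged example of what such an override might contain; the file location is deployment-specific and every value below is a placeholder, not taken from this diff:

# e.g. in a local settings module that Django imports
INSTALL_NUSHELL = True
INSTALL_NUSHELL_VERSION = "0.95.0"                      # placeholder version
NUSHELL_ENABLE_CONFIG = False
INSTALL_DENO = True
INSTALL_DENO_VERSION = "v1.44.0"                        # placeholder version
DENO_DEFAULT_PERMISSIONS = "--allow-read --allow-net"   # placeholder permission flags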
|
||||
|
||||
@@ -12,14 +12,16 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentHistory
|
||||
from agents.models import Agent, AgentHistory, Note
|
||||
from agents.serializers import AgentHistorySerializer
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from apiv3.utils import get_agent_config
|
||||
from autotasks.models import AutomatedTask, TaskResult
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskResultSerializer
|
||||
from checks.constants import CHECK_DEFER, CHECK_RESULT_DEFER
|
||||
from checks.models import Check, CheckResult
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from core.tasks import sync_mesh_perms_task
|
||||
from core.utils import (
|
||||
download_mesh_agent,
|
||||
get_core_settings,
|
||||
@@ -31,17 +33,20 @@ from logs.models import DebugLog, PendingAction
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.constants import (
|
||||
AGENT_DEFER,
|
||||
TRMM_MAX_REQUEST_SIZE,
|
||||
AgentHistoryType,
|
||||
AgentMonType,
|
||||
AgentPlat,
|
||||
AuditActionType,
|
||||
AuditObjType,
|
||||
CheckStatus,
|
||||
CustomFieldModel,
|
||||
DebugLogType,
|
||||
GoArch,
|
||||
MeshAgentIdent,
|
||||
PAStatus,
|
||||
)
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.helpers import make_random_password, notify_error
|
||||
from tacticalrmm.utils import reload_nats
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
@@ -338,6 +343,12 @@ class TaskRunner(APIView):
|
||||
AutomatedTask.objects.select_related("custom_field"), pk=pk
|
||||
)
|
||||
|
||||
content_length = request.META.get("CONTENT_LENGTH")
|
||||
if content_length and int(content_length) > TRMM_MAX_REQUEST_SIZE:
|
||||
request.data["stdout"] = ""
|
||||
request.data["stderr"] = "Content truncated due to excessive request size."
|
||||
request.data["retcode"] = 1
|
||||
|
||||
# get task result or create if doesn't exist
|
||||
try:
|
||||
task_result = (
|
||||
@@ -356,7 +367,7 @@ class TaskRunner(APIView):
|
||||
|
||||
AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type=AuditActionType.TASK_RUN,
|
||||
type=AgentHistoryType.TASK_RUN,
|
||||
command=task.name,
|
||||
script_results=request.data,
|
||||
)
|
||||
@@ -426,8 +437,8 @@ class MeshExe(APIView):

        try:
            return download_mesh_agent(dl_url)
-        except:
-            return notify_error("Unable to download mesh agent exe")
+        except Exception as e:
+            return notify_error(f"Unable to download mesh agent: {e}")
|
||||
|
||||
|
||||
class NewAgent(APIView):
|
||||
@@ -457,7 +468,7 @@ class NewAgent(APIView):
|
||||
user = User.objects.create_user( # type: ignore
|
||||
username=request.data["agent_id"],
|
||||
agent=agent,
|
||||
password=User.objects.make_random_password(60), # type: ignore
|
||||
password=make_random_password(len=60),
|
||||
)
|
||||
|
||||
token = Token.objects.create(user=user)
|
||||
@@ -481,6 +492,8 @@ class NewAgent(APIView):
|
||||
)
|
||||
|
||||
ret = {"pk": agent.pk, "token": token.key}
|
||||
sync_mesh_perms_task.delay()
|
||||
cache_agents_alert_template.delay()
|
||||
return Response(ret)
|
||||
|
||||
|
||||
@@ -559,12 +572,49 @@ class AgentHistoryResult(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request, agentid, pk):
|
||||
content_length = request.META.get("CONTENT_LENGTH")
|
||||
if content_length and int(content_length) > TRMM_MAX_REQUEST_SIZE:
|
||||
|
||||
request.data["script_results"]["stdout"] = ""
|
||||
request.data["script_results"][
|
||||
"stderr"
|
||||
] = "Content truncated due to excessive request size."
|
||||
request.data["script_results"]["retcode"] = 1
|
||||
|
||||
hist = get_object_or_404(
|
||||
AgentHistory.objects.filter(agent__agent_id=agentid), pk=pk
|
||||
AgentHistory.objects.select_related("custom_field").filter(
|
||||
agent__agent_id=agentid
|
||||
),
|
||||
pk=pk,
|
||||
)
|
||||
s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
|
||||
s.is_valid(raise_exception=True)
|
||||
s.save()
|
||||
|
||||
if hist.custom_field:
|
||||
if hist.custom_field.model == CustomFieldModel.AGENT:
|
||||
field = hist.custom_field.get_or_create_field_value(hist.agent)
|
||||
elif hist.custom_field.model == CustomFieldModel.CLIENT:
|
||||
field = hist.custom_field.get_or_create_field_value(hist.agent.client)
|
||||
elif hist.custom_field.model == CustomFieldModel.SITE:
|
||||
field = hist.custom_field.get_or_create_field_value(hist.agent.site)
|
||||
|
||||
r = request.data["script_results"]["stdout"]
|
||||
value = (
|
||||
r.strip()
|
||||
if hist.collector_all_output
|
||||
else r.strip().split("\n")[-1].strip()
|
||||
)
|
||||
|
||||
field.save_to_field(value)
|
||||
|
||||
if hist.save_to_agent_note:
|
||||
Note.objects.create(
|
||||
agent=hist.agent,
|
||||
user=request.user,
|
||||
note=request.data["script_results"]["stdout"],
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ class Policy(BaseAuditModel):
|
||||
old_policy: Optional[Policy] = (
|
||||
type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
)
|
||||
super(Policy, self).save(old_model=old_policy, *args, **kwargs)
|
||||
super().save(old_model=old_policy, *args, **kwargs)
|
||||
|
||||
# check if alert template was changed and cache on agents
|
||||
if old_policy:
|
||||
@@ -68,10 +68,7 @@ class Policy(BaseAuditModel):
|
||||
cache.delete_many_pattern("site_server_*")
|
||||
cache.delete_many_pattern("agent_*")
|
||||
|
||||
super(Policy, self).delete(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().delete(*args, **kwargs)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.name
|
||||
|
||||
@@ -126,7 +126,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
cache_alert_template.called_once()
|
||||
cache_alert_template.assert_called_once()
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
|
||||
@@ -7,10 +7,4 @@ class Command(BaseCommand):
|
||||
help = "Checks for orphaned tasks on all agents and removes them"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
remove_orphaned_win_tasks.s()
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"The task has been initiated. Check the Debug Log in the UI for progress."
|
||||
)
|
||||
)
|
||||
remove_orphaned_win_tasks()
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-23 04:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0038_add_missing_env_vars'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='task_type',
|
||||
field=models.CharField(choices=[('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('monthlydow', 'Monthly Day of Week'), ('checkfailure', 'On Check Failure'), ('manual', 'Manual'), ('runonce', 'Run Once'), ('onboarding', 'Onboarding'), ('scheduled', 'Scheduled')], default='manual', max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-19 05:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("autotasks", "0039_alter_automatedtask_task_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="taskresult",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
]
|
||||
@@ -1,10 +1,10 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
import string
|
||||
from contextlib import suppress
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
|
||||
|
||||
import pytz
|
||||
from django.core.cache import cache
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
@@ -14,12 +14,11 @@ from django.db.utils import DatabaseError
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from core.utils import get_core_settings
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.constants import (
|
||||
FIELDS_TRIGGER_TASK_UPDATE_AGENT,
|
||||
POLICY_TASK_FIELDS_TO_COPY,
|
||||
AlertSeverity,
|
||||
DebugLogType,
|
||||
TaskStatus,
|
||||
TaskSyncStatus,
|
||||
TaskType,
|
||||
@@ -31,6 +30,7 @@ if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
|
||||
from tacticalrmm.helpers import has_script_actions, has_webhook
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
from tacticalrmm.utils import (
|
||||
bitdays_to_string,
|
||||
@@ -46,6 +46,9 @@ def generate_task_name() -> str:
|
||||
return "TacticalRMM_" + "".join(random.choice(chars) for i in range(35))
|
||||
|
||||
|
||||
logger = logging.getLogger("trmm")
|
||||
|
||||
|
||||
class AutomatedTask(BaseAuditModel):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
@@ -149,7 +152,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
# get old task if exists
|
||||
old_task = AutomatedTask.objects.get(pk=self.pk) if self.pk else None
|
||||
super(AutomatedTask, self).save(old_model=old_task, *args, **kwargs)
|
||||
super().save(old_model=old_task, *args, **kwargs)
|
||||
|
||||
# check if fields were updated that require a sync to the agent and set status to notsynced
|
||||
if old_task:
|
||||
@@ -172,10 +175,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
cache.delete_many_pattern("site_*_tasks")
|
||||
cache.delete_many_pattern("agent_*_tasks")
|
||||
|
||||
super(AutomatedTask, self).delete(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().delete(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def schedule(self) -> Optional[str]:
|
||||
@@ -209,6 +209,9 @@ class AutomatedTask(BaseAuditModel):
|
||||
weeks = bitweeks_to_string(self.monthly_weeks_of_month)
|
||||
days = bitdays_to_string(self.run_time_bit_weekdays)
|
||||
return f"Runs on {months} on {weeks} on {days} at {run_time_nice}"
|
||||
elif self.task_type == TaskType.ONBOARDING:
|
||||
return "Onboarding: Runs once on task creation."
|
||||
return None
|
||||
|
||||
@property
|
||||
def fields_that_trigger_task_update_on_agent(self) -> List[str]:
|
||||
@@ -236,64 +239,56 @@ class AutomatedTask(BaseAuditModel):
|
||||
task.save()
|
||||
|
||||
# agent version >= 1.8.0
|
||||
def generate_nats_task_payload(
|
||||
self, agent: "Optional[Agent]" = None, editing: bool = False
|
||||
) -> Dict[str, Any]:
|
||||
def generate_nats_task_payload(self) -> Dict[str, Any]:
|
||||
task = {
|
||||
"pk": self.pk,
|
||||
"type": "rmm",
|
||||
"name": self.win_task_name,
|
||||
"overwrite_task": editing,
|
||||
"overwrite_task": True,
|
||||
"enabled": self.enabled,
|
||||
"trigger": self.task_type
|
||||
if self.task_type != TaskType.CHECK_FAILURE
|
||||
else TaskType.MANUAL,
|
||||
"trigger": (
|
||||
self.task_type
|
||||
if self.task_type != TaskType.CHECK_FAILURE
|
||||
else TaskType.MANUAL
|
||||
),
|
||||
"multiple_instances": self.task_instance_policy or 0,
|
||||
"delete_expired_task_after": self.remove_if_not_scheduled
|
||||
if self.expire_date
|
||||
else False,
|
||||
"start_when_available": self.run_asap_after_missed
|
||||
if self.task_type != TaskType.RUN_ONCE
|
||||
else True,
|
||||
"delete_expired_task_after": (
|
||||
self.remove_if_not_scheduled if self.expire_date else False
|
||||
),
|
||||
"start_when_available": (
|
||||
self.run_asap_after_missed
|
||||
if self.task_type != TaskType.RUN_ONCE
|
||||
else True
|
||||
),
|
||||
}
|
||||
|
||||
if self.task_type in (
|
||||
TaskType.RUN_ONCE,
|
||||
TaskType.DAILY,
|
||||
TaskType.WEEKLY,
|
||||
TaskType.MONTHLY,
|
||||
TaskType.MONTHLY_DOW,
|
||||
TaskType.RUN_ONCE,
|
||||
):
|
||||
# set runonce task in future if creating and run_asap_after_missed is set
|
||||
if (
|
||||
not editing
|
||||
and self.task_type == TaskType.RUN_ONCE
|
||||
and self.run_asap_after_missed
|
||||
and agent
|
||||
and self.run_time_date
|
||||
< djangotime.now().astimezone(pytz.timezone(agent.timezone))
|
||||
):
|
||||
self.run_time_date = (
|
||||
djangotime.now() + djangotime.timedelta(minutes=5)
|
||||
).astimezone(pytz.timezone(agent.timezone))
|
||||
if not self.run_time_date:
|
||||
self.run_time_date = djangotime.now()
|
||||
|
||||
task["start_year"] = int(self.run_time_date.strftime("%Y"))
|
||||
task["start_month"] = int(self.run_time_date.strftime("%-m"))
|
||||
task["start_day"] = int(self.run_time_date.strftime("%-d"))
|
||||
task["start_hour"] = int(self.run_time_date.strftime("%-H"))
|
||||
task["start_min"] = int(self.run_time_date.strftime("%-M"))
|
||||
task["start_year"] = self.run_time_date.year
|
||||
task["start_month"] = self.run_time_date.month
|
||||
task["start_day"] = self.run_time_date.day
|
||||
task["start_hour"] = self.run_time_date.hour
|
||||
task["start_min"] = self.run_time_date.minute
|
||||
|
||||
if self.expire_date:
|
||||
task["expire_year"] = int(self.expire_date.strftime("%Y"))
|
||||
task["expire_month"] = int(self.expire_date.strftime("%-m"))
|
||||
task["expire_day"] = int(self.expire_date.strftime("%-d"))
|
||||
task["expire_hour"] = int(self.expire_date.strftime("%-H"))
|
||||
task["expire_min"] = int(self.expire_date.strftime("%-M"))
|
||||
task["expire_year"] = self.expire_date.year
|
||||
task["expire_month"] = self.expire_date.month
|
||||
task["expire_day"] = self.expire_date.day
|
||||
task["expire_hour"] = self.expire_date.hour
|
||||
task["expire_min"] = self.expire_date.minute
|
||||
|
||||
if self.random_task_delay:
|
||||
task["random_delay"] = convert_to_iso_duration(self.random_task_delay)
|
||||
|
||||
if self.task_repetition_interval:
|
||||
if self.task_repetition_interval and self.task_repetition_duration:
|
||||
task["repetition_interval"] = convert_to_iso_duration(
|
||||
self.task_repetition_interval
|
||||
)
|
||||
@@ -341,27 +336,24 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": self.generate_nats_task_payload(agent),
|
||||
"schedtaskpayload": self.generate_nats_task_payload(),
|
||||
}
|
||||
logger.debug(nats_data)
|
||||
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok":
|
||||
task_result.sync_status = TaskSyncStatus.INITIAL
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"Unable to create scheduled task {self.name} on {task_result.agent.hostname}. It will be created when the agent checks in.",
|
||||
logger.error(
|
||||
f"Unable to create scheduled task {self.name} on {task_result.agent.hostname}: {r}"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
task_result.sync_status = TaskSyncStatus.SYNCED
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname} task {self.name} was successfully created",
|
||||
logger.info(
|
||||
f"{task_result.agent.hostname} task {self.name} was successfully created."
|
||||
)
|
||||
|
||||
return "ok"
|
||||
@@ -380,27 +372,24 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": self.generate_nats_task_payload(editing=True),
|
||||
"schedtaskpayload": self.generate_nats_task_payload(),
|
||||
}
|
||||
logger.debug(nats_data)
|
||||
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5))
|
||||
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok":
|
||||
task_result.sync_status = TaskSyncStatus.NOT_SYNCED
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"Unable to modify scheduled task {self.name} on {task_result.agent.hostname}({task_result.agent.agent_id}). It will try again on next agent checkin",
|
||||
logger.error(
|
||||
f"Unable to modify scheduled task {self.name} on {task_result.agent.hostname}: {r}"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
task_result.sync_status = TaskSyncStatus.SYNCED
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname} task {self.name} was successfully modified",
|
||||
logger.info(
|
||||
f"{task_result.agent.hostname} task {self.name} was successfully modified."
|
||||
)
|
||||
|
||||
return "ok"
|
||||
@@ -429,20 +418,13 @@ class AutomatedTask(BaseAuditModel):
|
||||
with suppress(DatabaseError):
|
||||
task_result.save(update_fields=["sync_status"])
|
||||
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname} task {self.name} will be deleted on next checkin",
|
||||
logger.error(
|
||||
f"Unable to delete task {self.name} on {task_result.agent.hostname}: {r}"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type=DebugLogType.AGENT_ISSUES,
|
||||
message=f"{task_result.agent.hostname}({task_result.agent.agent_id}) task {self.name} was deleted",
|
||||
)
|
||||
|
||||
logger.info(f"{task_result.agent.hostname} task {self.name} was deleted.")
|
||||
return "ok"
|
||||
|
||||
def run_win_task(self, agent: "Optional[Agent]" = None) -> str:
|
||||
@@ -465,18 +447,19 @@ class AutomatedTask(BaseAuditModel):
return "ok"

def should_create_alert(self, alert_template=None):
has_autotask_notification = (
self.dashboard_alert or self.email_alert or self.text_alert
)
has_alert_template_notification = alert_template and (
alert_template.task_always_alert
or alert_template.task_always_email
or alert_template.task_always_text
)
return (
self.dashboard_alert
or self.email_alert
or self.text_alert
or (
alert_template
and (
alert_template.task_always_alert
or alert_template.task_always_email
or alert_template.task_always_text
)
)
has_autotask_notification
or has_alert_template_notification
or has_webhook(alert_template, "task")
or has_script_actions(alert_template, "task")
)
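The net effect of this refactor is that a task with none of its own notification flags set can still raise an alert when the assigned alert template carries a webhook or script action. A minimal standalone sketch of the same logic, with stand-ins for the two helpers imported from tacticalrmm.helpers (their real implementations are not part of this hunk):

def has_webhook(template, kind) -> bool:
    # stand-in: the real helper inspects the alert template's webhook settings
    return False

def has_script_actions(template, kind) -> bool:
    # stand-in: the real helper inspects the alert template's script actions
    return False

def should_create_alert(task, alert_template=None) -> bool:
    has_autotask_notification = (
        task.dashboard_alert or task.email_alert or task.text_alert
    )
    has_alert_template_notification = alert_template and (
        alert_template.task_always_alert
        or alert_template.task_always_email
        or alert_template.task_always_text
    )
    return bool(
        has_autotask_notification
        or has_alert_template_notification
        or has_webhook(alert_template, "task")
        or has_script_actions(alert_template, "task")
    )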
@@ -486,6 +469,7 @@ class TaskResult(models.Model):
|
||||
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="taskresults",
|
||||
|
||||
@@ -2,6 +2,7 @@ from datetime import datetime
|
||||
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework import serializers
|
||||
from django.conf import settings
|
||||
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.constants import TaskType
|
||||
@@ -252,7 +253,13 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
|
||||
"shell": script.shell,
|
||||
"timeout": action["timeout"],
|
||||
"run_as_user": script.run_as_user,
|
||||
"env_vars": env_vars,
|
||||
"env_vars": Script.parse_script_env_vars(
|
||||
agent=agent,
|
||||
shell=script.shell,
|
||||
env_vars=env_vars,
|
||||
),
|
||||
"nushell_enable_config": settings.NUSHELL_ENABLE_CONFIG,
|
||||
"deno_default_permissions": settings.DENO_DEFAULT_PERMISSIONS,
|
||||
}
|
||||
)
|
||||
if actions_to_remove:
|
||||
|
||||
@@ -149,6 +149,7 @@ def remove_orphaned_win_tasks(self) -> str:
|
||||
for item in items
|
||||
]
|
||||
await asyncio.gather(*tasks)
|
||||
await nc.flush()
|
||||
await nc.close()
|
||||
|
||||
asyncio.run(_run())
|
||||
|
||||
@@ -417,7 +417,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "daily",
|
||||
"multiple_instances": 1,
|
||||
@@ -431,7 +431,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"day_interval": 1,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
self.assertEqual(
|
||||
@@ -470,7 +470,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "weekly",
|
||||
"multiple_instances": 2,
|
||||
@@ -490,7 +490,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"days_of_week": 127,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -518,7 +518,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "monthly",
|
||||
"multiple_instances": 1,
|
||||
@@ -538,7 +538,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"months_of_year": 1024,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -562,7 +562,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "monthlydow",
|
||||
"multiple_instances": 1,
|
||||
@@ -578,7 +578,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"weeks_of_month": 3,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -600,7 +600,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "runonce",
|
||||
"multiple_instances": 1,
|
||||
@@ -613,39 +613,10 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"start_min": int(task1.run_time_date.strftime("%-M")),
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test runonce with date in the past
|
||||
task1 = baker.make(
|
||||
"autotasks.AutomatedTask",
|
||||
agent=agent,
|
||||
name="test task 3",
|
||||
task_type=TaskType.RUN_ONCE,
|
||||
run_asap_after_missed=True,
|
||||
run_time_date=djangotime.datetime(2018, 6, 1, 23, 23, 23),
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
create_win_task_schedule(pk=task1.pk)
|
||||
nats_cmd.assert_called()
|
||||
|
||||
# check if task is scheduled for at most 5min in the future
|
||||
_, args, _ = nats_cmd.mock_calls[0]
|
||||
|
||||
current_minute = int(djangotime.now().strftime("%-M"))
|
||||
|
||||
if current_minute >= 55 and current_minute < 60:
|
||||
self.assertLess(
|
||||
args[0]["schedtaskpayload"]["start_min"],
|
||||
int(djangotime.now().strftime("%-M")),
|
||||
)
|
||||
else:
|
||||
self.assertGreater(
|
||||
args[0]["schedtaskpayload"]["start_min"],
|
||||
int(djangotime.now().strftime("%-M")),
|
||||
)
|
||||
|
||||
# test checkfailure task
|
||||
nats_cmd.reset_mock()
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
@@ -665,7 +636,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "manual",
|
||||
"multiple_instances": 1,
|
||||
@@ -673,7 +644,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"start_when_available": False,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
@@ -692,7 +663,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"pk": task1.pk,
|
||||
"type": "rmm",
|
||||
"name": task1.win_task_name,
|
||||
"overwrite_task": False,
|
||||
"overwrite_task": True,
|
||||
"enabled": True,
|
||||
"trigger": "manual",
|
||||
"multiple_instances": 1,
|
||||
@@ -700,7 +671,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"start_when_available": False,
|
||||
},
|
||||
},
|
||||
timeout=5,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from packaging import version as pyver
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
@@ -6,6 +7,8 @@ from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from tacticalrmm.constants import TaskType
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.permissions import _has_perm_on_agent
|
||||
|
||||
from .models import AutomatedTask
|
||||
@@ -40,6 +43,11 @@ class GetAddAutoTasks(APIView):
|
||||
if not _has_perm_on_agent(request.user, agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
if data["task_type"] == TaskType.ONBOARDING and pyver.parse(
|
||||
agent.version
|
||||
) < pyver.parse("2.6.0"):
|
||||
return notify_error("Onboarding tasks require agent >= 2.6.0")
|
||||
|
||||
data["agent"] = agent.pk
|
||||
|
||||
serializer = TaskSerializer(data=data)
|
||||
|
||||
api/tacticalrmm/beta/v1/__init__.py (new file, 0 lines)
api/tacticalrmm/beta/v1/agent/__init__.py (new file, 0 lines)
api/tacticalrmm/beta/v1/agent/filter.py (new file, 37 lines)
@@ -0,0 +1,37 @@
import django_filters
from agents.models import Agent


class AgentFilter(django_filters.FilterSet):
    last_seen_range = django_filters.DateTimeFromToRangeFilter(field_name="last_seen")
    total_ram_range = django_filters.NumericRangeFilter(field_name="total_ram")
    patches_last_installed_range = django_filters.DateTimeFromToRangeFilter(
        field_name="patches_last_installed"
    )

    client_id = django_filters.NumberFilter(method="client_id_filter")

    class Meta:
        model = Agent
        fields = [
            "id",
            "hostname",
            "agent_id",
            "operating_system",
            "plat",
            "monitoring_type",
            "needs_reboot",
            "logged_in_username",
            "last_logged_in_user",
            "alert_template",
            "site",
            "policy",
            "last_seen_range",
            "total_ram_range",
            "patches_last_installed_range",
        ]

    def client_id_filter(self, queryset, name, value):
        if value:
            return queryset.filter(site__client__id=value)
        return queryset
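A rough client-side sketch of how the new filters can be combined on the beta agent endpoint. The server URL and token are placeholders, and the _after/_min suffixes are django-filter's default lookups for the DateTimeFromToRangeFilter and NumericRangeFilter declared above:

import requests

BASE = "https://rmm.example.com"                      # placeholder server URL
HEADERS = {"Authorization": "Token REPLACE_ME"}       # placeholder API token

params = {
    "client_id": 1,                                   # routed through client_id_filter above
    "last_seen_range_after": "2024-02-01T00:00:00Z",
    "total_ram_range_min": 8,                         # units follow the Agent.total_ram field
    "page_size": 50,
}
# assumes the beta router is mounted at /beta/v1/ (see the urls.py hunk further down)
r = requests.get(f"{BASE}/beta/v1/agent/", headers=HEADERS, params=params)
r.raise_for_status()
print(len(r.json()["results"]), "agents on this page")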
api/tacticalrmm/beta/v1/agent/views.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import SearchFilter, OrderingFilter
from rest_framework.request import Request
from rest_framework.serializers import BaseSerializer

from agents.models import Agent
from agents.permissions import AgentPerms
from beta.v1.agent.filter import AgentFilter
from beta.v1.pagination import StandardResultsSetPagination
from ..serializers import DetailAgentSerializer, ListAgentSerializer


class AgentViewSet(viewsets.ModelViewSet):
    permission_classes = [IsAuthenticated, AgentPerms]
    queryset = Agent.objects.all()
    pagination_class = StandardResultsSetPagination
    http_method_names = ["get", "put"]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_class = AgentFilter
    search_fields = ["hostname", "services"]
    ordering_fields = ["id"]
    ordering = ["id"]

    def check_permissions(self, request: Request) -> None:
        if "agent_id" in request.query_params:
            self.kwargs["agent_id"] = request.query_params["agent_id"]
        super().check_permissions(request)

    def get_permissions(self):
        if self.request.method == "POST":
            self.permission_classes = [IsAuthenticated]
        return super().get_permissions()

    def get_serializer_class(self) -> type[BaseSerializer]:
        if self.kwargs:
            if self.kwargs["pk"]:
                return DetailAgentSerializer
        return ListAgentSerializer
api/tacticalrmm/beta/v1/client/__init__.py (new file, 0 lines)
api/tacticalrmm/beta/v1/client/views.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated

from clients.models import Client
from clients.permissions import ClientsPerms
from ..serializers import ClientSerializer


class ClientViewSet(viewsets.ModelViewSet):
    permission_classes = [IsAuthenticated, ClientsPerms]
    queryset = Client.objects.all()
    serializer_class = ClientSerializer
    http_method_names = ["get", "put"]
api/tacticalrmm/beta/v1/pagination.py (new file, 7 lines)
@@ -0,0 +1,7 @@
from rest_framework.pagination import PageNumberPagination


class StandardResultsSetPagination(PageNumberPagination):
    page_size = 100
    page_size_query_param = "page_size"
    max_page_size = 1000
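Usage note: results default to pages of 100 and the caller can raise that per request, capped at 1000, e.g.:

# GET /beta/v1/agent/?page=2&page_size=500   (page_size values above 1000 are clamped by DRF)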
api/tacticalrmm/beta/v1/serializers.py (new file, 73 lines)
@@ -0,0 +1,73 @@
from rest_framework import serializers

from agents.models import Agent
from clients.models import Client, Site


class ListAgentSerializer(serializers.ModelSerializer[Agent]):
    class Meta:
        model = Agent
        fields = "__all__"


class DetailAgentSerializer(serializers.ModelSerializer[Agent]):
    status = serializers.ReadOnlyField()

    class Meta:
        model = Agent
        fields = (
            "version",
            "operating_system",
            "plat",
            "goarch",
            "hostname",
            "agent_id",
            "last_seen",
            "services",
            "public_ip",
            "total_ram",
            "disks",
            "boot_time",
            "logged_in_username",
            "last_logged_in_user",
            "monitoring_type",
            "description",
            "mesh_node_id",
            "overdue_email_alert",
            "overdue_text_alert",
            "overdue_dashboard_alert",
            "offline_time",
            "overdue_time",
            "check_interval",
            "needs_reboot",
            "choco_installed",
            "wmi_detail",
            "patches_last_installed",
            "time_zone",
            "maintenance_mode",
            "block_policy_inheritance",
            "alert_template",
            "site",
            "policy",
            "status",
            "checks",
            "pending_actions_count",
            "cpu_model",
            "graphics",
            "local_ips",
            "make_model",
            "physical_disks",
            "serial_number",
        )


class ClientSerializer(serializers.ModelSerializer[Client]):
    class Meta:
        model = Client
        fields = "__all__"


class SiteSerializer(serializers.ModelSerializer[Site]):
    class Meta:
        model = Site
        fields = "__all__"
api/tacticalrmm/beta/v1/site/views.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import SearchFilter, OrderingFilter

from clients.models import Site
from clients.permissions import SitesPerms
from beta.v1.pagination import StandardResultsSetPagination
from ..serializers import SiteSerializer


class SiteViewSet(viewsets.ModelViewSet):
    permission_classes = [IsAuthenticated, SitesPerms]
    queryset = Site.objects.all()
    serializer_class = SiteSerializer
    pagination_class = StandardResultsSetPagination
    http_method_names = ["get", "put"]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    search_fields = ["name"]
    ordering_fields = ["id"]
    ordering = ["id"]
api/tacticalrmm/beta/v1/urls.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from rest_framework import routers
from .agent import views as agent
from .client import views as client
from .site import views as site

router = routers.DefaultRouter()

router.register("agent", agent.AgentViewSet, basename="agent")
router.register("client", client.ClientViewSet, basename="client")
router.register("site", site.SiteViewSet, basename="site")

urlpatterns = router.urls
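How these routes get mounted is outside this hunk; a typical include in the project-level urls.py, assuming the usual Django pattern, would be:

# hypothetical wiring, not shown in this diff
from django.urls import include, path

urlpatterns += [
    path("beta/v1/", include("beta.v1.urls")),
]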
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.10 on 2024-02-19 05:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0031_check_env_vars"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="checkhistory",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="checkresult",
|
||||
name="id",
|
||||
field=models.BigAutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
]
|
||||
@@ -19,6 +19,7 @@ from tacticalrmm.constants import (
|
||||
EvtLogNames,
|
||||
EvtLogTypes,
|
||||
)
|
||||
from tacticalrmm.helpers import has_script_actions, has_webhook
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -168,10 +169,7 @@ class Check(BaseAuditModel):
|
||||
elif self.agent:
|
||||
cache.delete(f"agent_{self.agent.agent_id}_checks")
|
||||
|
||||
super(Check, self).save(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
# if check is a policy check clear cache on everything
|
||||
@@ -183,10 +181,7 @@ class Check(BaseAuditModel):
|
||||
elif self.agent:
|
||||
cache.delete(f"agent_{self.agent.agent_id}_checks")
|
||||
|
||||
super(Check, self).delete(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().delete(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def readable_desc(self):
|
||||
@@ -236,18 +231,19 @@ class Check(BaseAuditModel):
|
||||
check.save()
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
has_check_notifications = (
|
||||
self.dashboard_alert or self.email_alert or self.text_alert
|
||||
)
|
||||
has_alert_template_notification = alert_template and (
|
||||
alert_template.check_always_alert
|
||||
or alert_template.check_always_email
|
||||
or alert_template.check_always_text
|
||||
)
|
||||
return (
|
||||
self.dashboard_alert
|
||||
or self.email_alert
|
||||
or self.text_alert
|
||||
or (
|
||||
alert_template
|
||||
and (
|
||||
alert_template.check_always_alert
|
||||
or alert_template.check_always_email
|
||||
or alert_template.check_always_text
|
||||
)
|
||||
)
|
||||
has_check_notifications
|
||||
or has_alert_template_notification
|
||||
or has_webhook(alert_template, "check")
|
||||
or has_script_actions(alert_template, "check")
|
||||
)
|
||||
|
||||
def add_check_history(
|
||||
@@ -290,6 +286,7 @@ class CheckResult(models.Model):
|
||||
class Meta:
|
||||
unique_together = (("agent", "assigned_check"),)
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="checkresults",
|
||||
@@ -338,10 +335,7 @@ class CheckResult(models.Model):
|
||||
):
|
||||
self.alert_severity = AlertSeverity.WARNING
|
||||
|
||||
super(CheckResult, self).save(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def history_info(self):
|
||||
@@ -371,9 +365,11 @@ class CheckResult(models.Model):
|
||||
if len(self.history) > 15:
|
||||
self.history = self.history[-15:]
|
||||
|
||||
update_fields.extend(["history"])
|
||||
update_fields.extend(["history", "more_info"])
|
||||
|
||||
avg = int(mean(self.history))
|
||||
txt = "Memory Usage" if check.check_type == CheckType.MEMORY else "CPU Load"
|
||||
self.more_info = f"Average {txt}: {avg}%"
|
||||
|
||||
if check.error_threshold and avg > check.error_threshold:
|
||||
self.status = CheckStatus.FAILING
|
||||
@@ -673,6 +669,7 @@ class CheckResult(models.Model):
|
||||
class CheckHistory(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
check_id = models.PositiveIntegerField(default=0)
|
||||
agent_id = models.CharField(max_length=200, null=True, blank=True)
|
||||
x = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
@@ -172,8 +172,13 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer):
|
||||
if obj.check_type != CheckType.SCRIPT:
|
||||
return []
|
||||
|
||||
# check's env_vars override the script's env vars
|
||||
return obj.env_vars or obj.script.env_vars
|
||||
agent = self.context["agent"] if "agent" in self.context.keys() else obj.agent
|
||||
|
||||
return Script.parse_script_env_vars(
|
||||
agent=agent,
|
||||
shell=obj.script.shell,
|
||||
env_vars=obj.env_vars,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Check
|
||||
|
||||
@@ -8,6 +8,7 @@ from alerts.models import Alert
|
||||
from checks.models import CheckResult
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.helpers import rand_range
|
||||
from tacticalrmm.logger import logger
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -120,9 +121,9 @@ def handle_resolved_check_email_alert_task(pk: int) -> str:
def prune_check_history(older_than_days: int) -> str:
from .models import CheckHistory

CheckHistory.objects.filter(
x__lt=djangotime.make_aware(dt.datetime.today())
- djangotime.timedelta(days=older_than_days)
c, _ = CheckHistory.objects.filter(
x__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
).delete()
logger.info(f"Pruned {c} check history objects")

return "ok"
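The rewrite relies on Django's QuerySet.delete() returning a (total_deleted, per_model_counts) tuple, which is what feeds the new log line, e.g.:

# delete() returns something like (42, {"checks.CheckHistory": 42})
c, _ = CheckHistory.objects.filter(
    x__lt=djangotime.now() - djangotime.timedelta(days=30)  # illustrative 30-day cutoff
).delete()
logger.info(f"Pruned {c} check history objects")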
@@ -172,6 +172,31 @@ class TestCheckViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_reset_all_checks_status(self):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
baker.make("checks.CheckResult", assigned_check=check, agent=agent)
|
||||
baker.make(
|
||||
"checks.CheckHistory",
|
||||
check_id=check.id,
|
||||
agent_id=agent.agent_id,
|
||||
_quantity=30,
|
||||
)
|
||||
baker.make(
|
||||
"checks.CheckHistory",
|
||||
check_id=check.id,
|
||||
agent_id=agent.agent_id,
|
||||
_quantity=30,
|
||||
)
|
||||
|
||||
url = f"{base_url}/{agent.agent_id}/resetall/"
|
||||
|
||||
resp = self.client.post(url)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_add_memory_check(self):
|
||||
url = f"{base_url}/"
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
|
||||
@@ -6,6 +6,7 @@ urlpatterns = [
|
||||
path("", views.GetAddChecks.as_view()),
|
||||
path("<int:pk>/", views.GetUpdateDeleteCheck.as_view()),
|
||||
path("<int:pk>/reset/", views.ResetCheck.as_view()),
|
||||
path("<agent:agent_id>/resetall/", views.ResetAllChecksStatus.as_view()),
|
||||
path("<agent:agent_id>/run/", views.run_checks),
|
||||
path("<int:pk>/history/", views.GetCheckHistory.as_view()),
|
||||
path("<str:target>/<int:pk>/csbulkrun/", views.bulk_run_checks),
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import asyncio
|
||||
from datetime import datetime as dt
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import msgpack
|
||||
import nats
|
||||
from django.db.models import Q
|
||||
from django.db.models import Prefetch, Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
@@ -16,17 +13,16 @@ from rest_framework.views import APIView
|
||||
from agents.models import Agent
|
||||
from alerts.models import Alert
|
||||
from automation.models import Policy
|
||||
from tacticalrmm.constants import CheckStatus, CheckType
|
||||
from tacticalrmm.helpers import notify_error, setup_nats_options
|
||||
from tacticalrmm.constants import AGENT_DEFER, CheckStatus, CheckType
|
||||
from tacticalrmm.exceptions import NatsDown
|
||||
from tacticalrmm.helpers import notify_error
|
||||
from tacticalrmm.nats_utils import abulk_nats_command
|
||||
from tacticalrmm.permissions import _has_perm_on_agent
|
||||
|
||||
from .models import Check, CheckHistory, CheckResult
|
||||
from .permissions import BulkRunChecksPerms, ChecksPerms, RunChecksPerms
|
||||
from .serializers import CheckHistorySerializer, CheckSerializer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from nats.aio.client import Client as NATSClient
|
||||
|
||||
|
||||
class GetAddChecks(APIView):
|
||||
permission_classes = [IsAuthenticated, ChecksPerms]
|
||||
@@ -126,15 +122,54 @@ class ResetCheck(APIView):
|
||||
result.save()
|
||||
|
||||
# resolve any alerts that are open
|
||||
alert = Alert.create_or_return_check_alert(
|
||||
if alert := Alert.create_or_return_check_alert(
|
||||
result.assigned_check, agent=result.agent, skip_create=True
|
||||
)
|
||||
if alert:
|
||||
):
|
||||
alert.resolve()
|
||||
|
||||
return Response("The check status was reset")
|
||||
|
||||
|
||||
class ResetAllChecksStatus(APIView):
|
||||
permission_classes = [IsAuthenticated, ChecksPerms]
|
||||
|
||||
def post(self, request, agent_id):
|
||||
agent = get_object_or_404(
|
||||
Agent.objects.defer(*AGENT_DEFER)
|
||||
.select_related(
|
||||
"policy",
|
||||
"policy__alert_template",
|
||||
"alert_template",
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"checkresults",
|
||||
queryset=CheckResult.objects.select_related("assigned_check"),
|
||||
),
|
||||
"agentchecks",
|
||||
),
|
||||
agent_id=agent_id,
|
||||
)
|
||||
|
||||
if not _has_perm_on_agent(request.user, agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
for check in agent.get_checks_with_policies():
|
||||
try:
|
||||
result = check.check_result
|
||||
result.status = CheckStatus.PASSING
|
||||
result.save()
|
||||
if alert := Alert.create_or_return_check_alert(
|
||||
result.assigned_check, agent=agent, skip_create=True
|
||||
):
|
||||
alert.resolve()
|
||||
except:
|
||||
# check hasn't run yet, no check result entry
|
||||
continue
|
||||
|
||||
return Response("All checks status were reset")
|
||||
|
||||
|
||||
class GetCheckHistory(APIView):
|
||||
permission_classes = [IsAuthenticated, ChecksPerms]
|
||||
|
||||
@@ -189,29 +224,22 @@ def bulk_run_checks(request, target, pk):
case "site":
q = Q(site__id=pk)

agents = list(
agent_ids = list(
Agent.objects.only("agent_id", "site")
.filter(q)
.values_list("agent_id", flat=True)
)

if not agents:
if not agent_ids:
return notify_error("No agents matched query")

async def _run_check(nc: "NATSClient", sub) -> None:
await nc.publish(subject=sub, payload=msgpack.dumps({"func": "runchecks"}))
payload = {"func": "runchecks"}
items = [(agent_id, payload) for agent_id in agent_ids]

async def _run() -> None:
opts = setup_nats_options()
try:
nc = await nats.connect(**opts)
except Exception as e:
return notify_error(str(e))
try:
asyncio.run(abulk_nats_command(items=items))
except NatsDown as e:
return notify_error(str(e))

tasks = [_run_check(nc=nc, sub=agent) for agent in agents]
await asyncio.gather(*tasks)
await nc.close()

asyncio.run(_run())
ret = f"Checks will now be run on {len(agents)} agents"
ret = f"Checks will now be run on {len(agent_ids)} agents"
return Response(ret)
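As the hunk shows, the view no longer opens its own NATS connection; it builds (subject, payload) pairs and defers to the shared helper. The data it hands over looks like this (abulk_nats_command itself lives in tacticalrmm.nats_utils and is not shown here):

# one entry per matched agent, all sharing the same payload
# items == [("agent-id-1", {"func": "runchecks"}), ("agent-id-2", {"func": "runchecks"}), ...]
payload = {"func": "runchecks"}
items = [(agent_id, payload) for agent_id in agent_ids]
asyncio.run(abulk_nats_command(items=items))  # the view above catches NatsDown from this call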
@@ -49,11 +49,7 @@ class Client(BaseAuditModel):
|
||||
|
||||
# get old client if exists
|
||||
old_client = Client.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Client, self).save(
|
||||
old_model=old_client,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().save(old_model=old_client, *args, **kwargs)
|
||||
|
||||
# check if policies have changed and initiate task to reapply policies if so
|
||||
if old_client and (
|
||||
@@ -129,11 +125,7 @@ class Site(BaseAuditModel):
|
||||
|
||||
# get old client if exists
|
||||
old_site = Site.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(
|
||||
old_model=old_site,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
super().save(old_model=old_site, *args, **kwargs)
|
||||
|
||||
# check if policies have changed and initiate task to reapply policies if so
|
||||
if old_site:
|
||||
@@ -141,6 +133,7 @@ class Site(BaseAuditModel):
|
||||
old_site.alert_template != self.alert_template
|
||||
or old_site.workstation_policy != self.workstation_policy
|
||||
or old_site.server_policy != self.server_policy
|
||||
or old_site.client != self.client
|
||||
):
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
|
||||
@@ -88,6 +88,7 @@ class TestClientViews(TacticalTestCase):
|
||||
"client": {"name": "Setup Client"},
|
||||
"site": {"name": "Setup Site"},
|
||||
"timezone": "America/Los_Angeles",
|
||||
"companyname": "TestCo Inc.",
|
||||
"initialsetup": True,
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
|
||||
@@ -3,6 +3,7 @@ import re
|
||||
import uuid
|
||||
from contextlib import suppress
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import Count, Exists, OuterRef, Prefetch, prefetch_related_objects
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
@@ -91,7 +92,8 @@ class GetAddClients(APIView):
|
||||
if "initialsetup" in request.data.keys():
|
||||
core = get_core_settings()
|
||||
core.default_time_zone = request.data["timezone"]
|
||||
core.save(update_fields=["default_time_zone"])
|
||||
core.mesh_company_name = request.data["companyname"]
|
||||
core.save(update_fields=["default_time_zone", "mesh_company_name"])
|
||||
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
@@ -288,6 +290,9 @@ class AgentDeployment(APIView):
|
||||
return Response(DeploymentSerializer(deps, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
if getattr(settings, "TRMM_INSECURE", False):
|
||||
return notify_error("Not available in insecure mode")
|
||||
|
||||
from accounts.models import User
|
||||
|
||||
site = get_object_or_404(Site, pk=request.data["site"])
|
||||
@@ -343,6 +348,9 @@ class GenerateAgent(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def get(self, request, uid):
|
||||
if getattr(settings, "TRMM_INSECURE", False):
|
||||
return notify_error("Not available in insecure mode")
|
||||
|
||||
from tacticalrmm.utils import generate_winagent_exe
|
||||
|
||||
try:
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
if [ $EUID -ne 0 ]; then
|
||||
echo "ERROR: Must be run as root"
|
||||
exit 1
|
||||
echo "ERROR: Must be run as root"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
HAS_SYSTEMD=$(ps --no-headers -o comm 1)
|
||||
@@ -12,6 +12,19 @@ if [ "${HAS_SYSTEMD}" != 'systemd' ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ $DISPLAY ]]; then
|
||||
echo "ERROR: Display detected. Installer only supports running headless, i.e from ssh."
|
||||
echo "If you cannot ssh in then please run 'sudo systemctl isolate multi-user.target' to switch to a non-graphical user session and run the installer again."
|
||||
echo "If you are already running headless, then you are probably running with X forwarding which is setting DISPLAY, if so then simply run"
|
||||
echo "unset DISPLAY"
|
||||
echo "to unset the variable and then try running the installer again"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DEBUG=0
|
||||
INSECURE=0
|
||||
NOMESH=0
|
||||
|
||||
agentDL='agentDLChange'
|
||||
meshDL='meshDLChange'
|
||||
|
||||
@@ -28,6 +41,7 @@ agentBin="${agentBinPath}/${binName}"
|
||||
agentConf='/etc/tacticalagent'
|
||||
agentSvcName='tacticalagent.service'
|
||||
agentSysD="/etc/systemd/system/${agentSvcName}"
|
||||
agentDir='/opt/tacticalagent'
|
||||
meshDir='/opt/tacticalmesh'
|
||||
meshSystemBin="${meshDir}/meshagent"
|
||||
meshSvcName='meshagent.service'
|
||||
@@ -37,38 +51,48 @@ deb=(ubuntu debian raspbian kali linuxmint)
|
||||
rhe=(fedora rocky centos rhel amzn arch opensuse)
|
||||
|
||||
set_locale_deb() {
|
||||
locale-gen "en_US.UTF-8"
|
||||
localectl set-locale LANG=en_US.UTF-8
|
||||
. /etc/default/locale
|
||||
locale-gen "en_US.UTF-8"
|
||||
localectl set-locale LANG=en_US.UTF-8
|
||||
. /etc/default/locale
|
||||
}
|
||||
|
||||
set_locale_rhel() {
|
||||
localedef -c -i en_US -f UTF-8 en_US.UTF-8 > /dev/null 2>&1
|
||||
localectl set-locale LANG=en_US.UTF-8
|
||||
. /etc/locale.conf
|
||||
localedef -c -i en_US -f UTF-8 en_US.UTF-8 >/dev/null 2>&1
|
||||
localectl set-locale LANG=en_US.UTF-8
|
||||
. /etc/locale.conf
|
||||
}
|
||||
|
||||
RemoveOldAgent() {
|
||||
if [ -f "${agentSysD}" ]; then
|
||||
systemctl disable ${agentSvcName}
|
||||
systemctl stop ${agentSvcName}
|
||||
rm -f ${agentSysD}
|
||||
rm -f "${agentSysD}"
|
||||
systemctl daemon-reload
|
||||
fi
|
||||
|
||||
if [ -f "${agentConf}" ]; then
|
||||
rm -f ${agentConf}
|
||||
rm -f "${agentConf}"
|
||||
fi
|
||||
|
||||
if [ -f "${agentBin}" ]; then
|
||||
rm -f ${agentBin}
|
||||
rm -f "${agentBin}"
|
||||
fi
|
||||
|
||||
if [ -d "${agentDir}" ]; then
|
||||
rm -rf "${agentDir}"
|
||||
fi
|
||||
}
|
||||
|
||||
InstallMesh() {
|
||||
if [ -f /etc/os-release ]; then
|
||||
distroID=$(. /etc/os-release; echo $ID)
|
||||
distroIDLIKE=$(. /etc/os-release; echo $ID_LIKE)
|
||||
distroID=$(
|
||||
. /etc/os-release
|
||||
echo $ID
|
||||
)
|
||||
distroIDLIKE=$(
|
||||
. /etc/os-release
|
||||
echo $ID_LIKE
|
||||
)
|
||||
if [[ " ${deb[*]} " =~ " ${distroID} " ]]; then
|
||||
set_locale_deb
|
||||
elif [[ " ${deb[*]} " =~ " ${distroIDLIKE} " ]]; then
|
||||
@@ -80,11 +104,9 @@ InstallMesh() {
|
||||
fi
|
||||
fi
|
||||
|
||||
meshTmpDir=$(mktemp -d -t "mesh-XXXXXXXXX")
|
||||
if [ $? -ne 0 ]; then
|
||||
meshTmpDir='/root/meshtemp'
|
||||
mkdir -p ${meshTmpDir}
|
||||
fi
|
||||
meshTmpDir='/root/meshtemp'
|
||||
mkdir -p $meshTmpDir
|
||||
|
||||
meshTmpBin="${meshTmpDir}/meshagent"
|
||||
wget --no-check-certificate -q -O ${meshTmpBin} ${meshDL}
|
||||
chmod +x ${meshTmpBin}
|
||||
@@ -101,8 +123,8 @@ RemoveMesh() {
|
||||
fi
|
||||
|
||||
if [ -f "${meshSysD}" ]; then
|
||||
systemctl stop ${meshSvcName} > /dev/null 2>&1
|
||||
systemctl disable ${meshSvcName} > /dev/null 2>&1
|
||||
systemctl stop ${meshSvcName} >/dev/null 2>&1
|
||||
systemctl disable ${meshSvcName} >/dev/null 2>&1
|
||||
rm -f ${meshSysD}
|
||||
fi
|
||||
|
||||
@@ -115,11 +137,26 @@ Uninstall() {
|
||||
RemoveOldAgent
|
||||
}
|
||||
|
||||
if [ $# -ne 0 ] && [ $1 == 'uninstall' ]; then
|
||||
if [ $# -ne 0 ] && [[ $1 =~ ^(uninstall|-uninstall|--uninstall)$ ]]; then
|
||||
Uninstall
|
||||
# Remove the current script
|
||||
rm "$0"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
while [[ "$#" -gt 0 ]]; do
|
||||
case $1 in
|
||||
-debug | --debug | debug) DEBUG=1 ;;
|
||||
-insecure | --insecure | insecure) INSECURE=1 ;;
|
||||
-nomesh | --nomesh | nomesh) NOMESH=1 ;;
|
||||
*)
|
||||
echo "ERROR: Unknown parameter: $1"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
RemoveOldAgent
|
||||
|
||||
echo "Downloading tactical agent..."
|
||||
@@ -132,7 +169,7 @@ chmod +x ${agentBin}
|
||||
|
||||
MESH_NODE_ID=""
|
||||
|
||||
if [ $# -ne 0 ] && [ $1 == '--nomesh' ]; then
|
||||
if [[ $NOMESH -eq 1 ]]; then
|
||||
echo "Skipping mesh install"
|
||||
else
|
||||
if [ -f "${meshSystemBin}" ]; then
|
||||
@@ -150,23 +187,28 @@ if [ ! -d "${agentBinPath}" ]; then
|
||||
mkdir -p ${agentBinPath}
|
||||
fi
|
||||
|
||||
if [ $# -ne 0 ] && [ $1 == '--debug' ]; then
|
||||
INSTALL_CMD="${agentBin} -m install -api ${apiURL} -client-id ${clientID} -site-id ${siteID} -agent-type ${agentType} -auth ${token} -log debug"
|
||||
else
|
||||
INSTALL_CMD="${agentBin} -m install -api ${apiURL} -client-id ${clientID} -site-id ${siteID} -agent-type ${agentType} -auth ${token}"
|
||||
fi
|
||||
INSTALL_CMD="${agentBin} -m install -api ${apiURL} -client-id ${clientID} -site-id ${siteID} -agent-type ${agentType} -auth ${token}"
|
||||
|
||||
if [ "${MESH_NODE_ID}" != '' ]; then
|
||||
INSTALL_CMD+=" -meshnodeid ${MESH_NODE_ID}"
|
||||
INSTALL_CMD+=" --meshnodeid ${MESH_NODE_ID}"
|
||||
fi
|
||||
|
||||
if [[ $DEBUG -eq 1 ]]; then
|
||||
INSTALL_CMD+=" --log debug"
|
||||
fi
|
||||
|
||||
if [[ $INSECURE -eq 1 ]]; then
|
||||
INSTALL_CMD+=" --insecure"
|
||||
fi
|
||||
|
||||
if [ "${proxy}" != '' ]; then
|
||||
INSTALL_CMD+=" -proxy ${proxy}"
|
||||
INSTALL_CMD+=" --proxy ${proxy}"
|
||||
fi
|
||||
|
||||
eval ${INSTALL_CMD}
|
||||
|
||||
tacticalsvc="$(cat << EOF
|
||||
tacticalsvc="$(
|
||||
cat <<EOF
|
||||
[Unit]
|
||||
Description=Tactical RMM Linux Agent
|
||||
|
||||
@@ -184,7 +226,7 @@ KillMode=process
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
)"
|
||||
echo "${tacticalsvc}" | tee ${agentSysD} > /dev/null
|
||||
echo "${tacticalsvc}" | tee ${agentSysD} >/dev/null
|
||||
|
||||
systemctl daemon-reload
|
||||
systemctl enable ${agentSvcName}
|
||||
|
||||
@@ -1,14 +1,38 @@
|
||||
import asyncio
|
||||
import fcntl
|
||||
import os
|
||||
import pty
|
||||
import select
|
||||
import signal
|
||||
import struct
|
||||
import subprocess
|
||||
import termios
|
||||
import threading
|
||||
import uuid
|
||||
from contextlib import suppress
|
||||
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer, JsonWebsocketConsumer
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.db.models import F
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.constants import AgentMonType
|
||||
from tacticalrmm.helpers import days_until_cert_expires
|
||||
from tacticalrmm.logger import logger
|
||||
|
||||
|
||||
def _has_perm(user, perm: str) -> bool:
|
||||
if user.is_superuser or (user.role and getattr(user.role, "is_superuser")):
|
||||
return True
|
||||
|
||||
# make sure non-superusers with empty roles aren't permitted
|
||||
elif not user.role:
|
||||
return False
|
||||
|
||||
return user.role and getattr(user.role, perm)
|
||||
|
||||
|
||||
class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
@@ -17,6 +41,11 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
|
||||
if isinstance(self.user, AnonymousUser):
|
||||
await self.close()
|
||||
return
|
||||
|
||||
if self.user.block_dashboard_login:
|
||||
await self.close()
|
||||
return
|
||||
|
||||
await self.accept()
|
||||
self.connected = True
|
||||
@@ -27,7 +56,6 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
self.dash_info.cancel()
|
||||
|
||||
self.connected = False
|
||||
await self.close()
|
||||
|
||||
async def receive_json(self, payload, **kwargs):
|
||||
pass
|
||||
@@ -62,12 +90,15 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
)
|
||||
.count()
|
||||
)
|
||||
|
||||
return {
|
||||
"total_server_offline_count": offline_server_agents_count,
|
||||
"total_workstation_offline_count": offline_workstation_agents_count,
|
||||
"total_server_count": total_server_agents_count,
|
||||
"total_workstation_count": total_workstation_agents_count,
|
||||
"action": "dashboard.agentcount",
|
||||
"data": {
|
||||
"total_server_offline_count": offline_server_agents_count,
|
||||
"total_workstation_offline_count": offline_workstation_agents_count,
|
||||
"total_server_count": total_server_agents_count,
|
||||
"total_workstation_count": total_workstation_agents_count,
|
||||
"days_until_cert_expires": days_until_cert_expires(),
|
||||
},
|
||||
}
|
||||
|
||||
async def send_dash_info(self):
|
||||
@@ -75,3 +106,137 @@ class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
c = await self.get_dashboard_info()
|
||||
await self.send_json(c)
|
||||
await asyncio.sleep(30)
|
||||
|
||||
|
||||
class TerminalConsumer(JsonWebsocketConsumer):
|
||||
child_pid = None
|
||||
fd = None
|
||||
shell = None
|
||||
command = ["/bin/bash"]
|
||||
user = None
|
||||
subprocess = None
|
||||
authorized = False
|
||||
connected = False
|
||||
|
||||
def run_command(self):
|
||||
master_fd, slave_fd = pty.openpty()
|
||||
|
||||
self.fd = master_fd
|
||||
env = os.environ.copy()
|
||||
env["TERM"] = "xterm"
|
||||
|
||||
with subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
|
||||
self.command,
|
||||
stdin=slave_fd,
|
||||
stdout=slave_fd,
|
||||
stderr=slave_fd,
|
||||
preexec_fn=os.setsid,
|
||||
env=env,
|
||||
cwd=os.getenv("HOME", os.getcwd()),
|
||||
) as proc:
|
||||
self.subprocess = proc
|
||||
self.child_pid = proc.pid
|
||||
proc.wait()
|
||||
|
||||
# Subprocess has finished, close the websocket
|
||||
# happens when process exits, either via user exiting using exit() or by error
|
||||
self.subprocess = None
|
||||
self.child_pid = None
|
||||
if self.connected:
|
||||
self.connected = False
|
||||
self.close(4030)
|
||||
|
||||
    def connect(self):
        if "user" not in self.scope:
            self.close(4401)
            return

        self.user = self.scope["user"]

        if isinstance(self.user, AnonymousUser):
            self.close()
            return

        if not self.user.is_authenticated:
            self.close(4401)
            return

        core: CoreSettings = CoreSettings.objects.first()  # type: ignore
        if not core.web_terminal_enabled:
            self.close(4401)
            return

        if self.user.block_dashboard_login or not _has_perm(
            self.user, "can_use_webterm"
        ):
            self.close(4401)
            return

        if self.child_pid is not None:
            return

        self.connected = True
        self.authorized = True
        self.accept()

        # Daemonize the thread so it automatically dies when the main thread exits
        thread = threading.Thread(target=self.run_command, daemon=True)
        thread.start()

        thread = threading.Thread(target=self.read_from_pty, daemon=True)
        thread.start()

    def read_from_pty(self):
        while True:
            select.select([self.fd], [], [])
            output = os.read(self.fd, 1024)
            if not output:
                break
            message = output.decode(errors="ignore")
            self.send_json(
                {
                    "action": "trmmcli.output",
                    "data": {"output": message, "messageId": str(uuid.uuid4())},
                }
            )

    def resize(self, row, col, xpix=0, ypix=0):
        winsize = struct.pack("HHHH", row, col, xpix, ypix)
        fcntl.ioctl(self.fd, termios.TIOCSWINSZ, winsize)

    def write_to_pty(self, message):
        os.write(self.fd, message.encode())

    def kill_pty(self):
        if self.subprocess is not None:
            try:
                os.killpg(os.getpgid(self.child_pid), signal.SIGKILL)
            except Exception as e:
                logger.error(f"Failed to kill process group: {str(e)}")
            finally:
                self.subprocess = None
                self.child_pid = None

    def disconnect(self, code):
        self.connected = False
        self.kill_pty()

    def receive_json(self, data):
        if not self.authorized:
            return

        action = data.get("action", None)

        if not action:
            return

        if action == "trmmcli.resize":
            self.resize(data["data"]["rows"], data["data"]["cols"])
        elif action == "trmmcli.input":
            message = data["data"]["input"]
            self.write_to_pty(message)
        elif action == "trmmcli.disconnect":
            self.kill_pty()
            self.send_json(
                {"action": "trmmcli.output", "data": {"output": "Terminal killed!"}}
            )

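The new TerminalConsumer only accepts connections from authenticated users who are allowed dashboard login, hold the can_use_webterm permission, and have the web terminal enabled in core settings. It then speaks a small JSON protocol over the websocket: the client sends trmmcli.input, trmmcli.resize, and trmmcli.disconnect actions and receives trmmcli.output messages produced by read_from_pty. A minimal sketch of those payload shapes follows; the websocket route and any client library are not shown in this diff and are assumptions, only the action names and data keys come from the code above.

```python
import json
import uuid

# client -> server: keystrokes typed into the web terminal
input_msg = json.dumps({"action": "trmmcli.input", "data": {"input": "ls -la\n"}})

# client -> server: resize the pty to match the terminal widget
resize_msg = json.dumps({"action": "trmmcli.resize", "data": {"rows": 40, "cols": 120}})

# client -> server: kill the shell; the consumer replies with "Terminal killed!"
disconnect_msg = json.dumps({"action": "trmmcli.disconnect"})

# server -> client: a chunk of pty output, as built in read_from_pty above
output_msg = {
    "action": "trmmcli.output",
    "data": {"output": "total 42\n", "messageId": str(uuid.uuid4())},
}
```
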
@@ -27,7 +27,7 @@ class Command(BaseCommand):
        self._warning("Mesh device group:", core.mesh_device_group)

        try:
            token = get_auth_token(core.mesh_username, core.mesh_token)
            token = get_auth_token(core.mesh_api_superuser, core.mesh_token)
        except Exception as e:
            self._error("Error getting auth token:")
            self._error(str(e))

@@ -5,6 +5,7 @@ from tacticalrmm.constants import (
    AGENT_OUTAGES_LOCK,
    ORPHANED_WIN_TASK_LOCK,
    RESOLVE_ALERTS_LOCK,
    SYNC_MESH_PERMS_TASK_LOCK,
    SYNC_SCHED_TASK_LOCK,
)

@@ -18,5 +19,6 @@ class Command(BaseCommand):
            ORPHANED_WIN_TASK_LOCK,
            RESOLVE_ALERTS_LOCK,
            SYNC_SCHED_TASK_LOCK,
            SYNC_MESH_PERMS_TASK_LOCK,
        ):
            cache.delete(key)

@@ -0,0 +1,70 @@
import multiprocessing

from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Generate conf for gunicorn"

    def handle(self, *args, **kwargs):
        self.stdout.write("Creating gunicorn conf...")

        cpu_count = multiprocessing.cpu_count()

        # worker processes
        workers = getattr(settings, "TRMM_GUNICORN_WORKERS", cpu_count * 2 + 1)
        threads = getattr(settings, "TRMM_GUNICORN_THREADS", cpu_count * 2)
        worker_class = getattr(settings, "TRMM_GUNICORN_WORKER_CLASS", "gthread")
        max_requests = getattr(settings, "TRMM_GUNICORN_MAX_REQUESTS", 50)
        max_requests_jitter = getattr(settings, "TRMM_GUNICORN_MAX_REQUESTS_JITTER", 8)
        worker_connections = getattr(settings, "TRMM_GUNICORN_WORKER_CONNS", 1000)
        timeout = getattr(settings, "TRMM_GUNICORN_TIMEOUT", 300)
        graceful_timeout = getattr(settings, "TRMM_GUNICORN_GRACEFUL_TIMEOUT", 300)

        # socket
        backlog = getattr(settings, "TRMM_GUNICORN_BACKLOG", 2048)
        if getattr(settings, "DOCKER_BUILD", False):
            bind = "0.0.0.0:8080"
        else:
            bind = f"unix:{settings.BASE_DIR / 'tacticalrmm.sock'}"

        # security
        limit_request_line = getattr(settings, "TRMM_GUNICORN_LIMIT_REQUEST_LINE", 0)
        limit_request_fields = getattr(
            settings, "TRMM_GUNICORN_LIMIT_REQUEST_FIELDS", 500
        )
        limit_request_field_size = getattr(
            settings, "TRMM_GUNICORN_LIMIT_REQUEST_FIELD_SIZE", 0
        )

        # server
        preload_app = getattr(settings, "TRMM_GUNICORN_PRELOAD_APP", True)

        # log
        loglevel = getattr(settings, "TRMM_GUNICORN_LOGLEVEL", "info")

        cfg = [
            f"bind = '{bind}'",
            f"workers = {workers}",
            f"threads = {threads}",
            f"worker_class = '{worker_class}'",
            f"backlog = {backlog}",
            f"worker_connections = {worker_connections}",
            f"timeout = {timeout}",
            f"graceful_timeout = {graceful_timeout}",
            f"limit_request_line = {limit_request_line}",
            f"limit_request_fields = {limit_request_fields}",
            f"limit_request_field_size = {limit_request_field_size}",
            f"max_requests = {max_requests}",
            f"max_requests_jitter = {max_requests_jitter}",
            f"loglevel = '{loglevel}'",
            f"chdir = '{settings.BASE_DIR}'",
            f"preload_app = {preload_app}",
        ]

        with open(settings.BASE_DIR / "gunicorn_config.py", "w") as fp:
            for line in cfg:
                fp.write(line + "\n")

        self.stdout.write("Created gunicorn conf")
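
As a rough illustration of what this new command writes: on a hypothetical 4-core, non-Docker install with none of the TRMM_GUNICORN_* settings overridden, the generated gunicorn_config.py would look roughly like the sketch below. The values follow the defaults in the code above; the BASE_DIR path shown is an assumption.

```python
# Hypothetical gunicorn_config.py for a 4-core host with default settings.
bind = 'unix:/rmm/api/tacticalrmm/tacticalrmm.sock'  # BASE_DIR path is illustrative
workers = 9            # cpu_count * 2 + 1
threads = 8            # cpu_count * 2
worker_class = 'gthread'
backlog = 2048
worker_connections = 1000
timeout = 300
graceful_timeout = 300
limit_request_line = 0
limit_request_fields = 500
limit_request_field_size = 0
max_requests = 50
max_requests_jitter = 8
loglevel = 'info'
chdir = '/rmm/api/tacticalrmm'  # BASE_DIR, illustrative
preload_app = True
```
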
@@ -4,7 +4,7 @@ import os
from django.conf import settings
from django.core.management.base import BaseCommand

from tacticalrmm.helpers import get_nats_ports
from tacticalrmm.helpers import get_nats_url


class Command(BaseCommand):
@@ -20,10 +20,9 @@ class Command(BaseCommand):
        else:
            ssl = "disable"

        nats_std_port, _ = get_nats_ports()
        config = {
            "key": settings.SECRET_KEY,
            "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:{nats_std_port}",
            "natsurl": get_nats_url(),
            "user": db["USER"],
            "pass": db["PASSWORD"],
            "host": db["HOST"],

@@ -1,7 +1,10 @@
import configparser
import math
import multiprocessing
import os
from pathlib import Path

import psutil
from django.conf import settings
from django.core.management.base import BaseCommand

@@ -12,6 +15,27 @@ class Command(BaseCommand):
    def handle(self, *args, **kwargs):
        self.stdout.write("Creating uwsgi conf...")

        try:
            cpu_count = multiprocessing.cpu_count()
            worker_initial = 3 if cpu_count == 1 else 4
        except:
            worker_initial = 4

        try:
            ram = math.ceil(psutil.virtual_memory().total / (1024**3))
            if ram <= 2:
                max_requests = 15
                max_workers = 6
            elif ram <= 4:
                max_requests = 75
                max_workers = 20
            else:
                max_requests = 100
                max_workers = 40
        except:
            max_requests = 50
            max_workers = 10

        config = configparser.ConfigParser()

        if getattr(settings, "DOCKER_BUILD", False):
@@ -35,15 +59,18 @@ class Command(BaseCommand):
            "buffer-size": str(getattr(settings, "UWSGI_BUFFER_SIZE", 65535)),
            "vacuum": str(getattr(settings, "UWSGI_VACUUM", True)).lower(),
            "die-on-term": str(getattr(settings, "UWSGI_DIE_ON_TERM", True)).lower(),
            "max-requests": str(getattr(settings, "UWSGI_MAX_REQUESTS", 500)),
            "max-requests": str(getattr(settings, "UWSGI_MAX_REQUESTS", max_requests)),
            "disable-logging": str(
                getattr(settings, "UWSGI_DISABLE_LOGGING", True)
            ).lower(),
            "worker-reload-mercy": str(getattr(settings, "UWSGI_RELOAD_MERCY", 30)),
            "cheaper-algo": "busyness",
            "cheaper": str(getattr(settings, "UWSGI_CHEAPER", 4)),
            "cheaper-initial": str(getattr(settings, "UWSGI_CHEAPER_INITIAL", 4)),
            "workers": str(getattr(settings, "UWSGI_MAX_WORKERS", 40)),
            "cheaper-step": str(getattr(settings, "UWSGI_CHEAPER_STEP", 2)),
            "cheaper-initial": str(
                getattr(settings, "UWSGI_CHEAPER_INITIAL", worker_initial)
            ),
            "workers": str(getattr(settings, "UWSGI_MAX_WORKERS", max_workers)),
            "cheaper-step": str(getattr(settings, "UWSGI_CHEAPER_STEP", 1)),
            "cheaper-overload": str(getattr(settings, "UWSGI_CHEAPER_OVERLOAD", 3)),
            "cheaper-busyness-min": str(getattr(settings, "UWSGI_BUSYNESS_MIN", 5)),
            "cheaper-busyness-max": str(getattr(settings, "UWSGI_BUSYNESS_MAX", 10)),

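This uwsgi change sizes worker defaults from the host's CPU count and RAM instead of hard-coding them; if psutil or multiprocessing fail it falls back to workers=10 and max-requests=50, and any explicit UWSGI_* setting still overrides the computed value. Restating the mapping as a standalone sketch with two worked examples (illustration only, not part of the diff):

```python
# Restatement of the sizing logic above as a pure function, for illustration only.
def uwsgi_defaults(cpu_count: int, ram_gb: int) -> dict:
    worker_initial = 3 if cpu_count == 1 else 4
    if ram_gb <= 2:
        max_requests, max_workers = 15, 6
    elif ram_gb <= 4:
        max_requests, max_workers = 75, 20
    else:
        max_requests, max_workers = 100, 40
    return {
        "cheaper-initial": worker_initial,
        "workers": max_workers,
        "max-requests": max_requests,
    }

# A small 1 vCPU / 2 GB VPS vs. a larger multi-core 8 GB host.
assert uwsgi_defaults(1, 2) == {"cheaper-initial": 3, "workers": 6, "max-requests": 15}
assert uwsgi_defaults(4, 8) == {"cheaper-initial": 4, "workers": 40, "max-requests": 100}
```
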
@@ -3,7 +3,8 @@ from urllib.parse import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand

from tacticalrmm.helpers import get_webdomain
from tacticalrmm.util_settings import get_backend_url, get_root_domain, get_webdomain
from tacticalrmm.utils import get_certs


class Command(BaseCommand):
@@ -16,6 +17,8 @@ class Command(BaseCommand):
        match kwargs["name"]:
            case "api":
                self.stdout.write(settings.ALLOWED_HOSTS[0])
            case "rootdomain":
                self.stdout.write(get_root_domain(settings.ALLOWED_HOSTS[0]))
            case "version":
                self.stdout.write(settings.TRMM_VERSION)
            case "webversion":
@@ -26,8 +29,16 @@ class Command(BaseCommand):
                self.stdout.write(settings.NATS_SERVER_VER)
            case "frontend":
                self.stdout.write(settings.CORS_ORIGIN_WHITELIST[0])
            case "backend_url":
                self.stdout.write(
                    get_backend_url(
                        settings.ALLOWED_HOSTS[0],
                        settings.TRMM_PROTO,
                        settings.TRMM_BACKEND_PORT,
                    )
                )
            case "webdomain":
                self.stdout.write(get_webdomain())
                self.stdout.write(get_webdomain(settings.CORS_ORIGIN_WHITELIST[0]))
            case "djangoadmin":
                url = f"https://{settings.ALLOWED_HOSTS[0]}/{settings.ADMIN_URL}"
                self.stdout.write(url)
@@ -59,3 +70,9 @@ class Command(BaseCommand):
                obj = core.mesh_token

                self.stdout.write(obj)
            case "certfile" | "keyfile":
                crt, key = get_certs()
                if kwargs["name"] == "certfile":
                    self.stdout.write(crt)
                elif kwargs["name"] == "keyfile":
                    self.stdout.write(key)

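These new cases extend the read-only config lookup command with rootdomain, backend_url, certfile, and keyfile outputs. Assuming the command is registered as get_config and takes the option name as a positional argument (its registered name is not visible in this hunk, so treat both as assumptions), invoking it from Django would look roughly like:

```python
# Hedged sketch: "get_config" is assumed to be the command's registered name.
from django.core.management import call_command

call_command("get_config", "rootdomain")   # root domain derived from ALLOWED_HOSTS[0]
call_command("get_config", "backend_url")  # built from host, TRMM_PROTO and TRMM_BACKEND_PORT
call_command("get_config", "certfile")     # TLS certificate path from get_certs()
```
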
@@ -5,13 +5,14 @@ import websockets
from django.core.management.base import BaseCommand

from core.utils import get_mesh_ws_url
from tacticalrmm.constants import TRMM_WS_MAX_SIZE


class Command(BaseCommand):
    help = "Sets up initial mesh central configuration"

    async def websocket_call(self, uri):
        async with websockets.connect(uri) as websocket:
        async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as websocket:
            # Get Invitation Link
            await websocket.send(
                json.dumps(

@@ -0,0 +1,19 @@
from django.core.management.base import BaseCommand
from meshctrl.utils import get_login_token

from core.utils import get_core_settings


class Command(BaseCommand):
    help = "generate a url to login to mesh as the superuser"

    def handle(self, *args, **kwargs):

        core = get_core_settings()

        token = get_login_token(key=core.mesh_token, user=f"user//{core.mesh_username}")
        token_param = f"login={token}&"

        control = f"{core.mesh_site}/?{token_param}"

        self.stdout.write(self.style.SUCCESS(control))
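
The new command above prints a one-shot MeshCentral login URL for the configured mesh superuser, built from mesh_site and a login token signed with mesh_token. With a hypothetical mesh_site of https://mesh.example.com, the output has the form:

```
https://mesh.example.com/?login=<login-token>&
```
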
@@ -6,13 +6,14 @@ from django.conf import settings
from django.core.management.base import BaseCommand

from core.utils import get_core_settings, get_mesh_ws_url
from tacticalrmm.constants import TRMM_WS_MAX_SIZE


class Command(BaseCommand):
    help = "Sets up initial mesh central configuration"

    async def websocket_call(self, uri):
        async with websockets.connect(uri) as websocket:
        async with websockets.connect(uri, max_size=TRMM_WS_MAX_SIZE) as websocket:
            # Get Device groups to see if it exists
            await websocket.send(json.dumps({"action": "meshes"}))

Some files were not shown because too many files have changed in this diff