Compare commits
720 Commits
*(Commit list: 720 commits, spanning SHAs 42cdf70cb4 through 335e571485. The Author, Date, and commit-message columns were not captured in this export.)*
@@ -1,7 +1,6 @@
-FROM python:3.8-slim
+FROM python:3.9.2-slim
 
 ENV TACTICAL_DIR /opt/tactical
-ENV TACTICAL_GO_DIR /usr/local/rmmgo
 ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
 ENV WORKSPACE_DIR /workspace
 ENV TACTICAL_USER tactical
@@ -9,14 +8,11 @@ ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
 
-EXPOSE 8000
+EXPOSE 8000 8383 8005
 
 RUN groupadd -g 1000 tactical && \
   useradd -u 1000 -g 1000 tactical
 
-# Copy Go Files
-COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go
-
 # Copy Dev python reqs
 COPY ./requirements.txt /
 
@@ -2,6 +2,7 @@ version: '3.4'
 
 services:
   api-dev:
+    container_name: trmm-api-dev
     image: api-dev
     restart: always
     build:
@@ -21,9 +22,10 @@ services:
         - tactical-backend
 
   app-dev:
-    image: node:12-alpine
+    container_name: trmm-app-dev
+    image: node:14-alpine
     restart: always
-    command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
+    command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
     working_dir: /workspace/web
     volumes:
       - ..:/workspace:cached
@@ -36,6 +38,7 @@ services:
 
   # nats
   nats-dev:
+    container_name: trmm-nats-dev
     image: ${IMAGE_REPO}tactical-nats:${VERSION}
     restart: always
     environment:
@@ -55,6 +58,7 @@ services:
 
   # meshcentral container
   meshcentral-dev:
+    container_name: trmm-meshcentral-dev
     image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
     restart: always
     environment:
@@ -77,6 +81,7 @@ services:
 
   # mongodb container for meshcentral
   mongodb-dev:
+    container_name: trmm-mongodb-dev
     image: mongo:4.4
     restart: always
     environment:
@@ -92,6 +97,7 @@ services:
 
   # postgres database for api service
   postgres-dev:
+    container_name: trmm-postgres-dev
     image: postgres:13-alpine
     restart: always
     environment:
@@ -107,14 +113,19 @@ services:
 
   # redis container for celery tasks
   redis-dev:
+    container_name: trmm-redis-dev
     restart: always
+    command: redis-server --appendonly yes
     image: redis:6.0-alpine
+    volumes:
+      - redis-data-dev:/data
     networks:
       dev:
         aliases:
           - tactical-redis
 
   init-dev:
+    container_name: trmm-init-dev
     image: api-dev
     build:
       context: .
@@ -143,6 +154,7 @@ services:
 
   # container for celery worker service
   celery-dev:
+    container_name: trmm-celery-dev
     image: api-dev
     build:
       context: .
@@ -160,6 +172,7 @@ services:
 
   # container for celery beat service
   celerybeat-dev:
+    container_name: trmm-celerybeat-dev
     image: api-dev
     build:
       context: .
@@ -175,8 +188,29 @@ services:
       - postgres-dev
       - redis-dev
 
-  nginx-dev:
+  # container for websockets communication
+  websockets-dev:
+    container_name: trmm-websockets-dev
+    image: api-dev
+    build:
+      context: .
+      dockerfile: ./api.dockerfile
+    command: ["tactical-websockets-dev"]
+    restart: always
+    networks:
+      dev:
+        aliases:
+          - tactical-websockets
+    volumes:
+      - tactical-data-dev:/opt/tactical
+      - ..:/workspace:cached
+    depends_on:
+      - postgres-dev
+      - redis-dev
+
   # container for tactical reverse proxy
+  nginx-dev:
+    container_name: trmm-nginx-dev
     image: ${IMAGE_REPO}tactical-nginx:${VERSION}
     restart: always
     environment:
@@ -196,11 +230,27 @@ services:
     volumes:
       - tactical-data-dev:/opt/tactical
 
+  mkdocs-dev:
+    container_name: trmm-mkdocs-dev
+    image: api-dev
+    restart: always
+    build:
+      context: .
+      dockerfile: ./api.dockerfile
+    command: ["tactical-mkdocs-dev"]
+    ports:
+      - "8005:8005"
+    volumes:
+      - ..:/workspace:cached
+    networks:
+      - dev
+
 volumes:
   tactical-data-dev:
   postgres-data-dev:
   mongo-dev-data:
   mesh-data-dev:
+  redis-data-dev:
 
 networks:
   dev:
@@ -100,6 +100,7 @@ MESH_USERNAME = '${MESH_USER}'
 MESH_SITE = 'https://${MESH_HOST}'
 MESH_TOKEN_KEY = '${MESH_TOKEN}'
 REDIS_HOST = '${REDIS_HOST}'
+ADMIN_ENABLED = True
 EOF
 )"
 
@@ -126,7 +127,7 @@ if [ "$1" = 'tactical-init-dev' ]; then
   test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
 
   # setup Python virtual env and install dependencies
-  ! test -e "${VIRTUAL_ENV}" && python -m venv --copies ${VIRTUAL_ENV}
+  ! test -e "${VIRTUAL_ENV}" && python -m venv ${VIRTUAL_ENV}
   "${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt
 
   django_setup
@@ -135,10 +136,11 @@ if [ "$1" = 'tactical-init-dev' ]; then
   webenv="$(cat << EOF
 PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
 DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
-APP_URL = https://${APP_HOST}
+APP_URL = "https://${APP_HOST}"
+DOCKER_BUILD = 1
 EOF
 )"
-  echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
+  echo "${webenv}" | tee "${WORKSPACE_DIR}"/web/.env > /dev/null
 
   # chown everything to tactical user
   chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
@@ -149,9 +151,6 @@ EOF
 fi
 
 if [ "$1" = 'tactical-api' ]; then
-  cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
-  chmod +x /usr/local/bin/goversioninfo
-
   check_tactical_ready
   "${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
 fi
@@ -166,3 +165,13 @@ if [ "$1" = 'tactical-celerybeat-dev' ]; then
   test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
   "${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
 fi
+
+if [ "$1" = 'tactical-websockets-dev' ]; then
+  check_tactical_ready
+  "${VIRTUAL_ENV}"/bin/daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
+fi
+
+if [ "$1" = 'tactical-mkdocs-dev' ]; then
+  cd "${WORKSPACE_DIR}/docs"
+  "${VIRTUAL_ENV}"/bin/mkdocs serve
+fi
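The new `tactical-websockets-dev` entrypoint runs daphne against `tacticalrmm.asgi:application` on port 8383, and the requirements.txt changes below add `channels` and `channels_redis`. The following is an illustrative sketch only, not the project's actual `asgi.py` or settings: the consumer class, its module path, the URL route, and the Redis hostname are all assumptions, shown here just to indicate the general shape of a Channels setup that such a daphne command would serve.

```python
# Hypothetical sketch of an ASGI entry point served by
# "daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0".
import os

from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tacticalrmm.settings")

# Initialise Django's HTTP handling before importing Channels pieces.
django_asgi_app = get_asgi_application()

from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.urls import path

from dashboard.consumers import DashboardConsumer  # hypothetical consumer module

application = ProtocolTypeRouter(
    {
        "http": django_asgi_app,
        "websocket": AuthMiddlewareStack(
            URLRouter([path("ws/dashinfo/", DashboardConsumer.as_asgi())])
        ),
    }
)

# settings.py would also need a Redis-backed channel layer, for example:
# CHANNEL_LAYERS = {
#     "default": {
#         "BACKEND": "channels_redis.core.RedisChannelLayer",
#         "CONFIG": {"hosts": [("tactical-redis", 6379)]},  # hostname is an assumption
#     }
# }
```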
@@ -1,40 +1,26 @@
 # To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
-amqp==5.0.5
-asgiref==3.3.1
-asyncio-nats-client==0.11.4
-billiard==3.6.3.0
-celery==5.0.5
-certifi==2020.12.5
-cffi==1.14.5
-chardet==4.0.0
-cryptography==3.4.4
-decorator==4.4.2
-Django==3.1.6
-django-cors-headers==3.7.0
-django-rest-knox==4.1.0
-djangorestframework==3.12.2
-future==0.18.2
-kombu==5.0.2
-loguru==0.5.3
-msgpack==1.0.2
-packaging==20.8
-psycopg2-binary==2.8.6
-pycparser==2.20
-pycryptodome==3.10.1
-pyotp==2.6.0
-pyparsing==2.4.7
-pytz==2021.1
-qrcode==6.1
-redis==3.5.3
-requests==2.25.1
-six==1.15.0
-sqlparse==0.4.1
-twilio==6.52.0
-urllib3==1.26.3
-validators==0.18.2
-vine==5.0.0
-websockets==8.1
-zipp==3.4.0
+asyncio-nats-client
+celery
+channels
+channels_redis
+Django
+django-cors-headers
+django-rest-knox
+djangorestframework
+loguru
+msgpack
+psycopg2-binary
+pycparser
+pycryptodome
+pyotp
+pyparsing
+pytz
+qrcode
+redis
+twilio
+packaging
+validators
+websockets
 black
 Werkzeug
 django-extensions
@@ -44,3 +30,7 @@ model_bakery
 mkdocs
 mkdocs-material
 pymdown-extensions
+Pygments
+mypy
+pysnooper
+isort

.github/FUNDING.yml (2 changed lines, vendored)
@@ -3,7 +3,7 @@
 github: wh1te909
 patreon: # Replace with a single Patreon username
 open_collective: # Replace with a single Open Collective username
-ko_fi: # Replace with a single Ko-fi username
+ko_fi: tacticalrmm
 tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
 community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
 liberapay: # Replace with a single Liberapay username

.github/ISSUE_TEMPLATE/bug_report.md (new file, 40 additions, vendored)
@@ -0,0 +1,40 @@
+---
+name: Bug report
+about: Create a bug report
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Server Info (please complete the following information):**
+- OS: [e.g. Ubuntu 20.04, Debian 10]
+- Browser: [e.g. chrome, safari]
+- RMM Version (as shown in top left of web UI):
+
+**Installation Method:**
+- [ ] Standard
+- [ ] Docker
+
+**Agent Info (please complete the following information):**
+- Agent version (as shown in the 'Summary' tab of the agent from web UI):
+- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Additional context**
+Add any other context about the problem here.

.github/ISSUE_TEMPLATE/feature_request.md (new file, 20 additions, vendored)
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.

.github/workflows/deploy-docs.yml (new file, 22 additions, vendored)
@@ -0,0 +1,22 @@
+name: Deploy Docs
+on:
+  push:
+    branches:
+      - master
+
+defaults:
+  run:
+    working-directory: docs
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: 3.x
+      - run: pip install --upgrade pip
+      - run: pip install --upgrade setuptools wheel
+      - run: pip install mkdocs mkdocs-material pymdown-extensions
+      - run: mkdocs gh-deploy --force

.gitignore (3 additions, vendored)
@@ -45,3 +45,6 @@ htmlcov/
 docker-compose.dev.yml
 docs/.vuepress/dist
 nats-rmm.conf
+.mypy_cache
+docs/site/
+reset_db.sh

.vscode/settings.json (7 changed lines, vendored)
@@ -3,7 +3,14 @@
     "python.languageServer": "Pylance",
     "python.analysis.extraPaths": [
         "api/tacticalrmm",
+        "api/env",
     ],
+    "python.analysis.diagnosticSeverityOverrides": {
+        "reportUnusedImport": "error",
+        "reportDuplicateImport": "error",
+    },
+    "python.analysis.memory.keepLibraryAst": true,
+    "python.linting.mypyEnabled": true,
     "python.analysis.typeCheckingMode": "basic",
     "python.formatting.provider": "black",
     "editor.formatOnSave": true,

README.md (102 changed lines)
@@ -8,13 +8,13 @@
 Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
 It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
 
-# [LIVE DEMO](https://rmm.xlawgaming.com/)
+# [LIVE DEMO](https://rmm.tacticalrmm.io/)
 Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
 
-*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*
-
 ### [Discord Chat](https://discord.gg/upGTkWp)
 
+### [Documentation](https://wh1te909.github.io/tacticalrmm/)
+
 ## Features
 
 - Teamviewer-like remote desktop control
@@ -33,98 +33,6 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
 
 - Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019
 
-## Installation
+## Installation / Backup / Restore / Usage
 
-### Requirements
+### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
-- VPS with 2GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
-- A domain you own with at least 3 subdomains
-- Google Authenticator app (2 factor is NOT optional)
-
-### Docker
-Refer to the [docker setup](docker/readme.md)
-
-
-### Installation example (Ubuntu server 20.04 LTS)
-
-Fresh VPS with latest updates\
-login as root and create a user and add to sudoers group (we will be creating a user called tactical)
-```
-apt update && apt -y upgrade
-adduser tactical
-usermod -a -G sudo tactical
-```
-
-switch to the tactical user and setup the firewall
-```
-su - tactical
-sudo ufw default deny incoming
-sudo ufw default allow outgoing
-sudo ufw allow ssh
-sudo ufw allow http
-sudo ufw allow https
-sudo ufw allow proto tcp from any to any port 4222
-sudo ufw enable && sudo ufw reload
-```
-
-Our domain for this example is tacticalrmm.com
-
-In the DNS manager of wherever our domain is hosted, we will create three A records, all pointing to the public IP address of our VPS
-
-Create A record ```api.tacticalrmm.com``` for the django rest backend\
-Create A record ```rmm.tacticalrmm.com``` for the vue frontend\
-Create A record ```mesh.tacticalrmm.com``` for meshcentral
-
-Download the install script and run it
-
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh
-chmod +x install.sh
-./install.sh
-```
-
-Links will be provided at the end of the install script.\
-Download the executable from the first link, then open ```rmm.tacticalrmm.com``` and login.\
-Upload the executable when prompted during the initial setup page.
-
-
-### Install an agent
-From the app's dashboard, choose Agents > Install Agent to generate an installer.
-
-## Updating
-Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh)
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
-chmod +x update.sh
-./update.sh
-```
-
-## Backup
-Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh)
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
-```
-Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
-
-Run it
-```
-chmod +x backup.sh
-./backup.sh
-```
-
-## Restore
-Change your 3 A records to point to new server's public IP
-
-Create same linux user account as old server and add to sudoers group and setup firewall (see install instructions above)
-
-Copy backup file to new server
-
-Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
-```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
-```
-
-Run the restore script, passing it the backup tar file as the first argument
-```
-chmod +x restore.sh
-./restore.sh rmm-backup-xxxxxxx.tar
-```
@@ -1,7 +1,8 @@
 from django.contrib import admin
 from rest_framework.authtoken.admin import TokenAdmin
 
-from .models import User
+from .models import User, Role
 
 admin.site.register(User)
 TokenAdmin.raw_id_fields = ("user",)
+admin.site.register(Role)

@@ -7,7 +7,7 @@ from accounts.models import User
 
 
 class Command(BaseCommand):
-    help = "Generates barcode for Google Authenticator and creates totp for user"
+    help = "Generates barcode for Authenticator and creates totp for user"
 
     def add_arguments(self, parser):
         parser.add_argument("code", type=str)
@@ -26,12 +26,10 @@ class Command(BaseCommand):
         url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
         subprocess.run(f'qr "{url}"', shell=True)
         self.stdout.write(
-            self.style.SUCCESS(
-                "Scan the barcode above with your google authenticator app"
-            )
+            self.style.SUCCESS("Scan the barcode above with your authenticator app")
         )
         self.stdout.write(
             self.style.SUCCESS(
-                f"If that doesn't work you may manually enter the key: {code}"
+                f"If that doesn't work you may manually enter the setup key: {code}"
             )
         )
api/tacticalrmm/accounts/management/commands/reset_2fa.py (new file, 57 additions)
@@ -0,0 +1,57 @@
+import os
+import subprocess
+
+import pyotp
+from django.core.management.base import BaseCommand
+
+from accounts.models import User
+
+
+class Command(BaseCommand):
+    help = "Reset 2fa"
+
+    def add_arguments(self, parser):
+        parser.add_argument("username", type=str)
+
+    def handle(self, *args, **kwargs):
+        username = kwargs["username"]
+        try:
+            user = User.objects.get(username=username)
+        except User.DoesNotExist:
+            self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
+            return
+
+        domain = "Tactical RMM"
+        nginx = "/etc/nginx/sites-available/frontend.conf"
+        found = None
+        if os.path.exists(nginx):
+            try:
+                with open(nginx, "r") as f:
+                    for line in f:
+                        if "server_name" in line:
+                            found = line
+                            break
+
+                if found:
+                    rep = found.replace("server_name", "").replace(";", "")
+                    domain = "".join(rep.split())
+            except:
+                pass
+
+        code = pyotp.random_base32()
+        user.totp_key = code
+        user.save(update_fields=["totp_key"])
+
+        url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
+        subprocess.run(f'qr "{url}"', shell=True)
+        self.stdout.write(
+            self.style.WARNING("Scan the barcode above with your authenticator app")
+        )
+        self.stdout.write(
+            self.style.WARNING(
+                f"If that doesn't work you may manually enter the setup key: {code}"
+            )
+        )
+        self.stdout.write(
+            self.style.SUCCESS(f"2fa was successfully reset for user {username}")
+        )
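Given its `add_arguments` definition, the new command is invoked as `python manage.py reset_2fa <username>`. It stores a fresh secret in `user.totp_key` and prints a provisioning QR code. The snippet below is an illustrative sketch only, not code from this PR: it shows how a secret produced this way is generally consumed with the pyotp API; the `valid_window` tolerance is an assumption, not necessarily what the project's login view uses.

```python
# Sketch of the pyotp round trip behind reset_2fa (illustration only).
import pyotp

totp_key = pyotp.random_base32()  # what the command stores on the user
uri = pyotp.totp.TOTP(totp_key).provisioning_uri("tactical", issuer_name="Tactical RMM")
print(uri)  # this URI is what gets encoded into the QR code shown in the terminal

# Later, a 6-digit code from the authenticator app can be checked like this;
# valid_window=1 tolerates one 30-second step of clock drift (an assumption).
token = "123456"
print(pyotp.TOTP(totp_key).verify(token, valid_window=1))
```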
@@ -0,0 +1,22 @@
+from django.core.management.base import BaseCommand
+from accounts.models import User
+
+
+class Command(BaseCommand):
+    help = "Reset password for user"
+
+    def add_arguments(self, parser):
+        parser.add_argument("username", type=str)
+
+    def handle(self, *args, **kwargs):
+        username = kwargs["username"]
+        try:
+            user = User.objects.get(username=username)
+        except User.DoesNotExist:
+            self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
+            return
+
+        passwd = input("Enter new password: ")
+        user.set_password(passwd)
+        user.save()
+        self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!"))
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-02-28 06:38
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0011_user_default_agent_tbl_tab'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='agents_per_page',
+            field=models.PositiveIntegerField(default=50),
+        ),
+    ]

@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-03-09 02:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0012_user_agents_per_page'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='client_tree_sort',
+            field=models.CharField(choices=[('alphafail', 'Move failing clients to the top'), ('alpha', 'Sort alphabetically')], default='alphafail', max_length=50),
+        ),
+    ]

@@ -0,0 +1,18 @@
+# Generated by Django 3.2 on 2021-04-11 01:43
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0013_user_client_tree_sort'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='client_tree_splitter',
+            field=models.PositiveIntegerField(default=11),
+        ),
+    ]

@@ -0,0 +1,18 @@
+# Generated by Django 3.2 on 2021-04-11 03:03
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0014_user_client_tree_splitter'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='loading_bar_color',
+            field=models.CharField(default='red', max_length=255),
+        ),
+    ]

@@ -0,0 +1,25 @@
+# Generated by Django 3.2.1 on 2021-05-07 15:26
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0022_urlaction'),
+        ('accounts', '0015_user_loading_bar_color'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='url_action',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='user', to='core.urlaction'),
+        ),
+        migrations.AlterField(
+            model_name='user',
+            name='agent_dblclick_action',
+            field=models.CharField(choices=[('editagent', 'Edit Agent'), ('takecontrol', 'Take Control'), ('remotebg', 'Remote Background'), ('urlaction', 'URL Action')], default='editagent', max_length=50),
+        ),
+    ]
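The migrations above add per-user UI preferences (agents_per_page, client_tree_sort, client_tree_splitter, loading_bar_color, url_action). The sketch below is illustrative only and not code from this PR: it shows how such fields would typically be exposed to the Vue frontend through a DRF serializer; the serializer name is hypothetical and the real project may expose them differently.

```python
# Hypothetical serializer for the new user UI-preference columns (illustration only).
from rest_framework import serializers

from accounts.models import User


class UserUIPrefsSerializer(serializers.ModelSerializer):  # hypothetical name
    class Meta:
        model = User
        fields = [
            "agents_per_page",
            "client_tree_sort",
            "client_tree_splitter",
            "loading_bar_color",
            "url_action",
        ]
```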
api/tacticalrmm/accounts/migrations/0017_auto_20210508_1716.py (new file, 173 additions)
@@ -0,0 +1,173 @@
+# Generated by Django 3.2.1 on 2021-05-08 17:16
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0016_auto_20210507_1526'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='user',
+            name='can_code_sign',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_do_server_maint',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_edit_agent',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_edit_core_settings',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_install_agents',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_accounts',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_alerts',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_automation_policies',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_autotasks',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_checks',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_clients',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_deployments',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_notes',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_pendingactions',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_procs',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_scripts',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_sites',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_software',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_winsvcs',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_manage_winupdates',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_reboot_agents',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_run_autotasks',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_run_bulk',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_run_checks',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_run_scripts',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_send_cmd',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_uninstall_agents',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_update_agents',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_use_mesh',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_view_auditlogs',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_view_debuglogs',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='user',
+            name='can_view_eventlogs',
+            field=models.BooleanField(default=False),
+        ),
+    ]
api/tacticalrmm/accounts/migrations/0018_auto_20210511_0233.py (new file, 181 additions)
@@ -0,0 +1,181 @@
+# Generated by Django 3.2.1 on 2021-05-11 02:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0017_auto_20210508_1716'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Role',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=255, unique=True)),
+                ('is_superuser', models.BooleanField(default=False)),
+                ('can_use_mesh', models.BooleanField(default=False)),
+                ('can_uninstall_agents', models.BooleanField(default=False)),
+                ('can_update_agents', models.BooleanField(default=False)),
+                ('can_edit_agent', models.BooleanField(default=False)),
+                ('can_manage_procs', models.BooleanField(default=False)),
+                ('can_view_eventlogs', models.BooleanField(default=False)),
+                ('can_send_cmd', models.BooleanField(default=False)),
+                ('can_reboot_agents', models.BooleanField(default=False)),
+                ('can_install_agents', models.BooleanField(default=False)),
+                ('can_run_scripts', models.BooleanField(default=False)),
+                ('can_run_bulk', models.BooleanField(default=False)),
+                ('can_manage_notes', models.BooleanField(default=False)),
+                ('can_edit_core_settings', models.BooleanField(default=False)),
+                ('can_do_server_maint', models.BooleanField(default=False)),
+                ('can_code_sign', models.BooleanField(default=False)),
+                ('can_manage_checks', models.BooleanField(default=False)),
+                ('can_run_checks', models.BooleanField(default=False)),
+                ('can_manage_clients', models.BooleanField(default=False)),
+                ('can_manage_sites', models.BooleanField(default=False)),
+                ('can_manage_deployments', models.BooleanField(default=False)),
+                ('can_manage_automation_policies', models.BooleanField(default=False)),
+                ('can_manage_autotasks', models.BooleanField(default=False)),
+                ('can_run_autotasks', models.BooleanField(default=False)),
+                ('can_view_auditlogs', models.BooleanField(default=False)),
+                ('can_manage_pendingactions', models.BooleanField(default=False)),
+                ('can_view_debuglogs', models.BooleanField(default=False)),
+                ('can_manage_scripts', models.BooleanField(default=False)),
+                ('can_manage_alerts', models.BooleanField(default=False)),
+                ('can_manage_winsvcs', models.BooleanField(default=False)),
+                ('can_manage_software', models.BooleanField(default=False)),
+                ('can_manage_winupdates', models.BooleanField(default=False)),
+                ('can_manage_accounts', models.BooleanField(default=False)),
+            ],
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_code_sign',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_do_server_maint',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_edit_agent',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_edit_core_settings',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_install_agents',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_accounts',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_alerts',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_automation_policies',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_autotasks',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_checks',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_clients',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_deployments',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_notes',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_pendingactions',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_procs',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_scripts',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_sites',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_software',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_winsvcs',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_manage_winupdates',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_reboot_agents',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_run_autotasks',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_run_bulk',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_run_checks',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_run_scripts',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_send_cmd',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_uninstall_agents',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_update_agents',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_use_mesh',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_view_auditlogs',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_view_debuglogs',
+        ),
+        migrations.RemoveField(
+            model_name='user',
+            name='can_view_eventlogs',
+        ),
+    ]
25  api/tacticalrmm/accounts/migrations/0019_user_role.py  (new file)
@@ -0,0 +1,25 @@
# Generated by Django 3.2.1 on 2021-05-11 02:33

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("accounts", "0018_auto_20210511_0233"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="role",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="roles",
                to="accounts.role",
            ),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-05-11 17:37

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0019_user_role'),
    ]

    operations = [
        migrations.AddField(
            model_name='role',
            name='can_manage_roles',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -7,6 +7,7 @@ AGENT_DBLCLICK_CHOICES = [
    ("editagent", "Edit Agent"),
    ("takecontrol", "Take Control"),
    ("remotebg", "Remote Background"),
+    ("urlaction", "URL Action"),
]


AGENT_TBL_TAB_CHOICES = [
@@ -15,6 +16,11 @@ AGENT_TBL_TAB_CHOICES = [
    ("mixed", "Mixed"),
]

+CLIENT_TREE_SORT_CHOICES = [
+    ("alphafail", "Move failing clients to the top"),
+    ("alpha", "Sort alphabetically"),
+]
+

class User(AbstractUser, BaseAuditModel):
    is_active = models.BooleanField(default=True)
@@ -24,9 +30,22 @@ class User(AbstractUser, BaseAuditModel):
    agent_dblclick_action = models.CharField(
        max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
    )
+    url_action = models.ForeignKey(
+        "core.URLAction",
+        related_name="user",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+    )
    default_agent_tbl_tab = models.CharField(
        max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
    )
+    agents_per_page = models.PositiveIntegerField(default=50)  # not currently used
+    client_tree_sort = models.CharField(
+        max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail"
+    )
+    client_tree_splitter = models.PositiveIntegerField(default=11)
+    loading_bar_color = models.CharField(max_length=255, default="red")

    agent = models.OneToOneField(
        "agents.Agent",
@@ -36,9 +55,123 @@ class User(AbstractUser, BaseAuditModel):
        on_delete=models.CASCADE,
    )

+    role = models.ForeignKey(
+        "accounts.Role",
+        null=True,
+        blank=True,
+        related_name="roles",
+        on_delete=models.SET_NULL,
+    )
+
    @staticmethod
    def serialize(user):
        # serializes the task and returns json
        from .serializers import UserSerializer

        return UserSerializer(user).data
+
+
+class Role(models.Model):
+    name = models.CharField(max_length=255, unique=True)
+    is_superuser = models.BooleanField(default=False)
+
+    # agents
+    can_use_mesh = models.BooleanField(default=False)
+    can_uninstall_agents = models.BooleanField(default=False)
+    can_update_agents = models.BooleanField(default=False)
+    can_edit_agent = models.BooleanField(default=False)
+    can_manage_procs = models.BooleanField(default=False)
+    can_view_eventlogs = models.BooleanField(default=False)
+    can_send_cmd = models.BooleanField(default=False)
+    can_reboot_agents = models.BooleanField(default=False)
+    can_install_agents = models.BooleanField(default=False)
+    can_run_scripts = models.BooleanField(default=False)
+    can_run_bulk = models.BooleanField(default=False)
+
+    # core
+    can_manage_notes = models.BooleanField(default=False)
+    can_edit_core_settings = models.BooleanField(default=False)
+    can_do_server_maint = models.BooleanField(default=False)
+    can_code_sign = models.BooleanField(default=False)
+
+    # checks
+    can_manage_checks = models.BooleanField(default=False)
+    can_run_checks = models.BooleanField(default=False)
+
+    # clients
+    can_manage_clients = models.BooleanField(default=False)
+    can_manage_sites = models.BooleanField(default=False)
+    can_manage_deployments = models.BooleanField(default=False)
+
+    # automation
+    can_manage_automation_policies = models.BooleanField(default=False)
+
+    # automated tasks
+    can_manage_autotasks = models.BooleanField(default=False)
+    can_run_autotasks = models.BooleanField(default=False)
+
+    # logs
+    can_view_auditlogs = models.BooleanField(default=False)
+    can_manage_pendingactions = models.BooleanField(default=False)
+    can_view_debuglogs = models.BooleanField(default=False)
+
+    # scripts
+    can_manage_scripts = models.BooleanField(default=False)
+
+    # alerts
+    can_manage_alerts = models.BooleanField(default=False)
+
+    # win services
+    can_manage_winsvcs = models.BooleanField(default=False)
+
+    # software
+    can_manage_software = models.BooleanField(default=False)
+
+    # windows updates
+    can_manage_winupdates = models.BooleanField(default=False)
+
+    # accounts
+    can_manage_accounts = models.BooleanField(default=False)
+    can_manage_roles = models.BooleanField(default=False)
+
+    def __str__(self):
+        return self.name
+
+    @staticmethod
+    def perms():
+        return [
+            "is_superuser",
+            "can_use_mesh",
+            "can_uninstall_agents",
+            "can_update_agents",
+            "can_edit_agent",
+            "can_manage_procs",
+            "can_view_eventlogs",
+            "can_send_cmd",
+            "can_reboot_agents",
+            "can_install_agents",
+            "can_run_scripts",
+            "can_run_bulk",
+            "can_manage_notes",
+            "can_edit_core_settings",
+            "can_do_server_maint",
+            "can_code_sign",
+            "can_manage_checks",
+            "can_run_checks",
+            "can_manage_clients",
+            "can_manage_sites",
+            "can_manage_deployments",
+            "can_manage_automation_policies",
+            "can_manage_autotasks",
+            "can_run_autotasks",
+            "can_view_auditlogs",
+            "can_manage_pendingactions",
+            "can_view_debuglogs",
+            "can_manage_scripts",
+            "can_manage_alerts",
+            "can_manage_winsvcs",
+            "can_manage_software",
+            "can_manage_winupdates",
+            "can_manage_accounts",
+            "can_manage_roles",
+        ]
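Taken together, the new User.role foreign key and the Role model above hang all per-user permissions off a single related object. A minimal sketch of how a role might be created and attached (illustrative only; the username is a placeholder):

from accounts.models import Role, User

techs = Role.objects.create(name="Techs", can_run_scripts=True, can_manage_checks=True)

user = User.objects.get(username="jane")  # placeholder username
user.role = techs
user.save(update_fields=["role"])

assert user.role.can_run_scripts
print(Role.perms()[:3])  # ['is_superuser', 'can_use_mesh', 'can_uninstall_agents']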
19  api/tacticalrmm/accounts/permissions.py  (new file)
@@ -0,0 +1,19 @@
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm


class AccountsPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET":
            return True

        return _has_perm(r, "can_manage_accounts")


class RolesPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET":
            return True

        return _has_perm(r, "can_manage_roles")
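These permission classes delegate to _has_perm from tacticalrmm/permissions.py, which is not part of this compare view. A rough sketch of the check they presumably rely on, assuming superusers (or roles flagged is_superuser) bypass everything and any other user needs the named boolean set on their Role; the real helper may differ:

def _has_perm(request, perm: str) -> bool:
    # assumed behaviour: Django superusers and is_superuser roles pass every check
    if request.user.is_superuser or (
        request.user.role and getattr(request.user.role, "is_superuser", False)
    ):
        return True

    # a user without a role keeps only the read-only (GET) access granted above
    if request.user.role is None:
        return False

    return getattr(request.user.role, perm, False)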
@@ -1,13 +1,28 @@
import pyotp
from rest_framework.serializers import ModelSerializer, SerializerMethodField

-from .models import User
+from .models import User, Role


+class UserUISerializer(ModelSerializer):
+    class Meta:
+        model = User
+        fields = [
+            "dark_mode",
+            "show_community_scripts",
+            "agent_dblclick_action",
+            "url_action",
+            "default_agent_tbl_tab",
+            "client_tree_sort",
+            "client_tree_splitter",
+            "loading_bar_color",
+        ]
+
+
class UserSerializer(ModelSerializer):
    class Meta:
        model = User
-        fields = (
+        fields = [
            "id",
            "username",
            "first_name",
@@ -15,7 +30,8 @@ class UserSerializer(ModelSerializer):
            "email",
            "is_active",
            "last_login",
-        )
+            "role",
+        ]


class TOTPSetupSerializer(ModelSerializer):
@@ -34,3 +50,9 @@ class TOTPSetupSerializer(ModelSerializer):
        return pyotp.totp.TOTP(obj.totp_key).provisioning_uri(
            obj.username, issuer_name="Tactical RMM"
        )
+
+
+class RoleSerializer(ModelSerializer):
+    class Meta:
+        model = Role
+        fields = "__all__"
@@ -271,18 +271,15 @@ class TestUserAction(TacticalTestCase):

    def test_user_ui(self):
        url = "/accounts/users/ui/"
-        data = {"dark_mode": False}
-        r = self.client.patch(url, data, format="json")
-        self.assertEqual(r.status_code, 200)
-
-        data = {"show_community_scripts": True}
-        r = self.client.patch(url, data, format="json")
-        self.assertEqual(r.status_code, 200)

        data = {
-            "userui": True,
+            "dark_mode": True,
+            "show_community_scripts": True,
            "agent_dblclick_action": "editagent",
            "default_agent_tbl_tab": "mixed",
+            "client_tree_sort": "alpha",
+            "client_tree_splitter": 14,
+            "loading_bar_color": "green",
        }
        r = self.client.patch(url, data, format="json")
        self.assertEqual(r.status_code, 200)
@@ -9,4 +9,7 @@ urlpatterns = [
    path("users/reset_totp/", views.UserActions.as_view()),
    path("users/setup_totp/", views.TOTPSetup.as_view()),
    path("users/ui/", views.UserUI.as_view()),
+    path("permslist/", views.PermsList.as_view()),
+    path("roles/", views.GetAddRoles.as_view()),
+    path("<int:pk>/role/", views.GetUpdateDeleteRole.as_view()),
]
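The three new routes line up with the PermsList, GetAddRoles and GetUpdateDeleteRole views in the views diff that follows. Assuming the app is mounted at /accounts/ (the test above patches /accounts/users/ui/), exercising the endpoints with DRF's test client looks roughly like this:

from rest_framework.test import APIClient

client = APIClient()
# client.force_authenticate(user=...)  # any authenticated user can GET

# names exposed by Role.perms(), used to build a role-editing form
r = client.get("/accounts/permslist/")
print(r.data)  # ["is_superuser", "can_use_mesh", ...]

# creating a role accepts any of the boolean flags from that list
r = client.post("/accounts/roles/", {"name": "Techs", "can_manage_checks": True}, format="json")
assert r.status_code == 200  # the view simply returns Response("ok")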
@@ -6,16 +6,29 @@ from django.shortcuts import get_object_or_404
from knox.views import LoginView as KnoxLoginView
from rest_framework import status
from rest_framework.authtoken.serializers import AuthTokenSerializer
-from rest_framework.permissions import AllowAny
+from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

-from agents.models import Agent
from logs.models import AuditLog
from tacticalrmm.utils import notify_error

-from .models import User
-from .serializers import TOTPSetupSerializer, UserSerializer
+from .models import User, Role
+from .permissions import AccountsPerms, RolesPerms
+from .serializers import (
+    TOTPSetupSerializer,
+    UserSerializer,
+    UserUISerializer,
+    RoleSerializer,
+)
+
+
+def _is_root_user(request, user) -> bool:
+    return (
+        hasattr(settings, "ROOT_USER")
+        and request.user != user
+        and user.username == settings.ROOT_USER
+    )


class CheckCreds(KnoxLoginView):
@@ -71,6 +84,8 @@ class LoginView(KnoxLoginView):


class GetAddUsers(APIView):
+    permission_classes = [IsAuthenticated, AccountsPerms]
+
    def get(self, request):
        users = User.objects.filter(agent=None)

@@ -79,7 +94,7 @@ class GetAddUsers(APIView):
    def post(self, request):
        # add new user
        try:
-            user = User.objects.create_user(
+            user = User.objects.create_user(  # type: ignore
                request.data["username"],
                request.data["email"],
                request.data["password"],
@@ -91,13 +106,17 @@ class GetAddUsers(APIView):

        user.first_name = request.data["first_name"]
        user.last_name = request.data["last_name"]
-        # Can be changed once permissions and groups are introduced
-        user.is_superuser = True
+        if "role" in request.data.keys() and isinstance(request.data["role"], int):
+            role = get_object_or_404(Role, pk=request.data["role"])
+            user.role = role
+
        user.save()
        return Response(user.username)


class GetUpdateDeleteUser(APIView):
+    permission_classes = [IsAuthenticated, AccountsPerms]
+
    def get(self, request, pk):
        user = get_object_or_404(User, pk=pk)

@@ -106,11 +125,7 @@ class GetUpdateDeleteUser(APIView):
    def put(self, request, pk):
        user = get_object_or_404(User, pk=pk)

-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
            return notify_error("The root user cannot be modified from the UI")

        serializer = UserSerializer(instance=user, data=request.data, partial=True)
@@ -121,11 +136,7 @@ class GetUpdateDeleteUser(APIView):

    def delete(self, request, pk):
        user = get_object_or_404(User, pk=pk)
-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
            return notify_error("The root user cannot be deleted from the UI")

        user.delete()
@@ -134,15 +145,11 @@ class GetUpdateDeleteUser(APIView):


class UserActions(APIView):
-
+    permission_classes = [IsAuthenticated, AccountsPerms]
    # reset password
    def post(self, request):
        user = get_object_or_404(User, pk=request.data["id"])
-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
            return notify_error("The root user cannot be modified from the UI")

        user.set_password(request.data["password"])
@@ -153,11 +160,7 @@ class UserActions(APIView):
    # reset two factor token
    def put(self, request):
        user = get_object_or_404(User, pk=request.data["id"])
-        if (
-            hasattr(settings, "ROOT_USER")
-            and request.user != user
-            and user.username == settings.ROOT_USER
-        ):
+        if _is_root_user(request, user):
            return notify_error("The root user cannot be modified from the UI")

        user.totp_key = ""
@@ -185,19 +188,48 @@ class TOTPSetup(APIView):

class UserUI(APIView):
    def patch(self, request):
-        user = request.user
-
-        if "dark_mode" in request.data.keys():
-            user.dark_mode = request.data["dark_mode"]
-            user.save(update_fields=["dark_mode"])
-
-        if "show_community_scripts" in request.data.keys():
-            user.show_community_scripts = request.data["show_community_scripts"]
-            user.save(update_fields=["show_community_scripts"])
-
-        if "userui" in request.data.keys():
-            user.agent_dblclick_action = request.data["agent_dblclick_action"]
-            user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
-            user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
-
+        serializer = UserUISerializer(
+            instance=request.user, data=request.data, partial=True
+        )
+        serializer.is_valid(raise_exception=True)
+        serializer.save()
        return Response("ok")
+
+
+class PermsList(APIView):
+    def get(self, request):
+        return Response(Role.perms())
+
+
+class GetAddRoles(APIView):
+    permission_classes = [IsAuthenticated, RolesPerms]
+
+    def get(self, request):
+        roles = Role.objects.all()
+        return Response(RoleSerializer(roles, many=True).data)
+
+    def post(self, request):
+        serializer = RoleSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+        serializer.save()
+        return Response("ok")
+
+
+class GetUpdateDeleteRole(APIView):
+    permission_classes = [IsAuthenticated, RolesPerms]
+
+    def get(self, request, pk):
+        role = get_object_or_404(Role, pk=pk)
+        return Response(RoleSerializer(role).data)
+
+    def put(self, request, pk):
+        role = get_object_or_404(Role, pk=pk)
+        serializer = RoleSerializer(instance=role, data=request.data)
+        serializer.is_valid(raise_exception=True)
+        serializer.save()
+        return Response("ok")
+
+    def delete(self, request, pk):
+        role = get_object_or_404(Role, pk=pk)
+        role.delete()
+        return Response("ok")
@@ -1,7 +1,8 @@
from django.contrib import admin

-from .models import Agent, Note, RecoveryAction
+from .models import Agent, AgentCustomField, Note, RecoveryAction

admin.site.register(Agent)
admin.site.register(RecoveryAction)
admin.site.register(Note)
+admin.site.register(AgentCustomField)
@@ -6,7 +6,7 @@ from itertools import cycle

from django.conf import settings
from django.utils import timezone as djangotime
-from model_bakery.recipe import Recipe, foreign_key
+from model_bakery.recipe import Recipe, foreign_key, seq


def generate_agent_id(hostname):
@@ -30,8 +30,7 @@ agent = Recipe(
    hostname="DESKTOP-TEST123",
    version="1.3.0",
    monitoring_type=cycle(["workstation", "server"]),
-    salt_id=generate_agent_id("DESKTOP-TEST123"),
-    agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
+    agent_id=seq("asdkj3h4234-1234hg3h4g34-234jjh34|DESKTOP-TEST123"),
)

server_agent = agent.extend(
@@ -44,8 +43,12 @@ workstation_agent = agent.extend(

online_agent = agent.extend(last_seen=djangotime.now())

+offline_agent = agent.extend(
+    last_seen=djangotime.now() - djangotime.timedelta(minutes=7)
+)
+
overdue_agent = agent.extend(
-    last_seen=djangotime.now() - djangotime.timedelta(minutes=6)
+    last_seen=djangotime.now() - djangotime.timedelta(minutes=35)
)

agent_with_services = agent.extend(
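With seq() in place every baked agent gets a unique agent_id, and the new offline_agent recipe complements online_agent and overdue_agent. Typical use in a test (the "agents." prefix assumes the recipes live in the agents app's baker_recipes.py, model_bakery's usual convention):

from model_bakery import baker

online = baker.make_recipe("agents.online_agent")
overdue = baker.make_recipe("agents.overdue_agent")

# seq() keeps agent_id unique across bulk creation
fleet = baker.make_recipe("agents.agent", _quantity=5)
assert len({a.agent_id for a in fleet}) == 5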
@@ -0,0 +1,93 @@
from django.core.management.base import BaseCommand

from agents.models import Agent
from clients.models import Client, Site


class Command(BaseCommand):
    help = "Bulk update agent offline/overdue time"

    def add_arguments(self, parser):
        parser.add_argument("time", type=int, help="Time in minutes")
        parser.add_argument(
            "--client",
            type=str,
            help="Client Name",
        )
        parser.add_argument(
            "--site",
            type=str,
            help="Site Name",
        )
        parser.add_argument(
            "--offline",
            action="store_true",
            help="Offline",
        )
        parser.add_argument(
            "--overdue",
            action="store_true",
            help="Overdue",
        )
        parser.add_argument(
            "--all",
            action="store_true",
            help="All agents",
        )

    def handle(self, *args, **kwargs):
        time = kwargs["time"]
        client_name = kwargs["client"]
        site_name = kwargs["site"]
        all_agents = kwargs["all"]
        offline = kwargs["offline"]
        overdue = kwargs["overdue"]
        agents = None

        if offline and time < 2:
            self.stdout.write(self.style.ERROR("Minimum offline time is 2 minutes"))
            return

        if overdue and time < 3:
            self.stdout.write(self.style.ERROR("Minimum overdue time is 3 minutes"))
            return

        if client_name:
            try:
                client = Client.objects.get(name=client_name)
            except Client.DoesNotExist:
                self.stdout.write(
                    self.style.ERROR(f"Client {client_name} doesn't exist")
                )
                return

            agents = Agent.objects.filter(site__client=client)

        elif site_name:
            try:
                site = Site.objects.get(name=site_name)
            except Site.DoesNotExist:
                self.stdout.write(self.style.ERROR(f"Site {site_name} doesn't exist"))
                return

            agents = Agent.objects.filter(site=site)

        elif all_agents:
            agents = Agent.objects.all()

        if agents:
            if offline:
                agents.update(offline_time=time)
                self.stdout.write(
                    self.style.SUCCESS(
                        f"Changed offline time on {len(agents)} agents to {time} minutes"
                    )
                )

            if overdue:
                agents.update(overdue_time=time)
                self.stdout.write(
                    self.style.SUCCESS(
                        f"Changed overdue time on {len(agents)} agents to {time} minutes"
                    )
                )
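The compare view does not show this management command's filename, so the name below is only a placeholder. Assuming it were saved as bulk_change_time.py under the app's management/commands directory, usage would look like:

# python manage.py bulk_change_time 5 --all --offline
# python manage.py bulk_change_time 45 --client "Acme Corp" --overdue

# or programmatically from a Django shell:
from django.core.management import call_command

call_command("bulk_change_time", 5, all=True, offline=True)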
@@ -0,0 +1,18 @@
from django.conf import settings
from django.core.management.base import BaseCommand

from agents.models import Agent


class Command(BaseCommand):
    help = "Shows online agents that are not on the latest version"

    def handle(self, *args, **kwargs):
        q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(
            "pk", "version", "last_seen", "overdue_time", "offline_time"
        )
        agents = [i for i in q if i.status == "online"]
        for agent in agents:
            self.stdout.write(
                self.style.SUCCESS(f"{agent.hostname} - v{agent.version}")
            )
@@ -0,0 +1,20 @@
# Generated by Django 3.1.7 on 2021-03-04 03:57

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('alerts', '0006_auto_20210217_1736'),
        ('agents', '0030_agent_offline_time'),
    ]

    operations = [
        migrations.AddField(
            model_name='agent',
            name='alert_template',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='alerts.alerttemplate'),
        ),
    ]
24  api/tacticalrmm/agents/migrations/0032_agentcustomfield.py  (new file)
@@ -0,0 +1,24 @@
# Generated by Django 3.1.7 on 2021-03-17 14:45

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0014_customfield'),
        ('agents', '0031_agent_alert_template'),
    ]

    operations = [
        migrations.CreateModel(
            name='AgentCustomField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('value', models.TextField(blank=True, null=True)),
                ('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='agents.agent')),
                ('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='agent_fields', to='core.customfield')),
            ],
        ),
    ]
@@ -0,0 +1,19 @@
# Generated by Django 3.1.7 on 2021-03-29 02:51

import django.contrib.postgres.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0032_agentcustomfield'),
    ]

    operations = [
        migrations.AddField(
            model_name='agentcustomfield',
            name='multiple_value',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-03-29 03:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0033_agentcustomfield_multiple_value'),
    ]

    operations = [
        migrations.AddField(
            model_name='agentcustomfield',
            name='checkbox_value',
            field=models.BooleanField(blank=True, default=False),
        ),
    ]
23  api/tacticalrmm/agents/migrations/0035_auto_20210329_1709.py  (new file)
@@ -0,0 +1,23 @@
# Generated by Django 3.1.7 on 2021-03-29 17:09

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0034_agentcustomfield_checkbox_value'),
    ]

    operations = [
        migrations.RenameField(
            model_name='agentcustomfield',
            old_name='checkbox_value',
            new_name='bool_value',
        ),
        migrations.RenameField(
            model_name='agentcustomfield',
            old_name='value',
            new_name='string_value',
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-04-17 01:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0035_auto_20210329_1709'),
    ]

    operations = [
        migrations.AddField(
            model_name='agent',
            name='block_policy_inheritance',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -4,7 +4,7 @@ import re
import time
from collections import Counter
from distutils.version import LooseVersion
-from typing import Any, List, Union
+from typing import Any

import msgpack
import validators
@@ -13,14 +13,13 @@ from Crypto.Hash import SHA3_384
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad
from django.conf import settings
+from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils import timezone as djangotime
from loguru import logger
from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout
-from packaging import version as pyver

-from alerts.models import AlertTemplate
from core.models import TZ_CHOICES, CoreSettings
from logs.models import BaseAuditModel

@@ -64,6 +63,14 @@ class Agent(BaseAuditModel):
        max_length=255, choices=TZ_CHOICES, null=True, blank=True
    )
    maintenance_mode = models.BooleanField(default=False)
+    block_policy_inheritance = models.BooleanField(default=False)
+    alert_template = models.ForeignKey(
+        "alerts.AlertTemplate",
+        related_name="agents",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+    )
    site = models.ForeignKey(
        "clients.Site",
        related_name="agents",
@@ -85,14 +92,14 @@ class Agent(BaseAuditModel):
        old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
        super(BaseAuditModel, self).save(*args, **kwargs)

-        # check if new agent has been create
+        # check if new agent has been created
        # or check if policy have changed on agent
        # or if site has changed on agent and if so generate-policies
        if (
            not old_agent
-            or old_agent
-            and old_agent.policy != self.policy
-            or old_agent.site != self.site
+            or (old_agent and old_agent.policy != self.policy)
+            or (old_agent.site != self.site)
+            or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
        ):
            self.generate_checks_from_policies()
            self.generate_tasks_from_policies()
@@ -104,14 +111,6 @@ class Agent(BaseAuditModel):
    def client(self):
        return self.site.client

-    @property
-    def has_nats(self):
-        return pyver.parse(self.version) >= pyver.parse("1.1.0")
-
-    @property
-    def has_gotasks(self):
-        return pyver.parse(self.version) >= pyver.parse("1.1.1")
-
    @property
    def timezone(self):
        # return the default timezone unless the timezone is explicitly set per agent
@@ -164,25 +163,32 @@ class Agent(BaseAuditModel):

    @property
    def has_patches_pending(self):
-        return self.winupdates.filter(action="approve").filter(installed=False).exists()
+        return self.winupdates.filter(action="approve").filter(installed=False).exists()  # type: ignore

    @property
    def checks(self):
-        total, passing, failing = 0, 0, 0
+        total, passing, failing, warning, info = 0, 0, 0, 0, 0

-        if self.agentchecks.exists():
-            for i in self.agentchecks.all():
+        if self.agentchecks.exists():  # type: ignore
+            for i in self.agentchecks.all():  # type: ignore
                total += 1
                if i.status == "passing":
                    passing += 1
                elif i.status == "failing":
-                    failing += 1
+                    if i.alert_severity == "error":
+                        failing += 1
+                    elif i.alert_severity == "warning":
+                        warning += 1
+                    elif i.alert_severity == "info":
+                        info += 1

        ret = {
            "total": total,
            "passing": passing,
            "failing": failing,
-            "has_failing_checks": failing > 0,
+            "warning": warning,
+            "info": info,
+            "has_failing_checks": failing > 0 or warning > 0,
        }
        return ret

@@ -197,6 +203,27 @@ class Agent(BaseAuditModel):
        except:
            return ["unknown cpu model"]

+    @property
+    def graphics(self):
+        ret, mrda = [], []
+        try:
+            graphics = self.wmi_detail["graphics"]
+            for i in graphics:
+                caption = [x["Caption"] for x in i if "Caption" in x][0]
+                if "microsoft remote display adapter" in caption.lower():
+                    mrda.append("yes")
+                    continue
+
+                ret.append([x["Caption"] for x in i if "Caption" in x][0])
+
+            # only return this if no other graphics cards
+            if not ret and mrda:
+                return "Microsoft Remote Display Adapter"
+
+            return ", ".join(ret)
+        except:
+            return "Graphics info requires agent v1.4.14"
+
    @property
    def local_ips(self):
        ret = []
@@ -236,11 +263,17 @@ class Agent(BaseAuditModel):
            make = [x["Manufacturer"] for x in mobo if "Manufacturer" in x][0]
            model = [x["Product"] for x in mobo if "Product" in x][0]

+            if make.lower() == "lenovo":
+                sysfam = [x["SystemFamily"] for x in comp_sys if "SystemFamily" in x][0]
+                if "to be filled" not in sysfam.lower():
+                    model = sysfam
+
            return f"{make} {model}"
        except:
            pass

        try:
+            comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
            return [x["Version"] for x in comp_sys_prod if "Version" in x][0]
        except:
            pass
@@ -270,10 +303,24 @@ class Agent(BaseAuditModel):
        except:
            return ["unknown disk"]

+    def check_run_interval(self) -> int:
+        interval = self.check_interval
+        # determine if any agent checks have a custom interval and set the lowest interval
+        for check in self.agentchecks.filter(overriden_by_policy=False):  # type: ignore
+            if check.run_interval and check.run_interval < interval:
+
+                # don't allow check runs less than 15s
+                if check.run_interval < 15:
+                    interval = 15
+                else:
+                    interval = check.run_interval
+
+        return interval
+
    def run_script(
        self,
        scriptpk: int,
-        args: List[str] = [],
+        args: list[str] = [],
        timeout: int = 120,
        full: bool = False,
        wait: bool = False,
@@ -283,10 +330,13 @@ class Agent(BaseAuditModel):
        from scripts.models import Script

        script = Script.objects.get(pk=scriptpk)
+
+        parsed_args = script.parse_script_args(self, script.shell, args)
+
        data = {
            "func": "runscriptfull" if full else "runscript",
            "timeout": timeout,
-            "script_args": args,
+            "script_args": parsed_args,
            "payload": {
                "code": script.code,
                "shell": script.shell,
@@ -295,10 +345,10 @@ class Agent(BaseAuditModel):

        running_agent = self
        if run_on_any:
-            nats_ping = {"func": "ping", "timeout": 1}
+            nats_ping = {"func": "ping"}

            # try on self first
-            r = asyncio.run(self.nats_cmd(nats_ping))
+            r = asyncio.run(self.nats_cmd(nats_ping, timeout=1))

            if r == "pong":
                running_agent = self
@@ -306,13 +356,13 @@ class Agent(BaseAuditModel):
                online = [
                    agent
                    for agent in Agent.objects.only(
-                        "pk", "last_seen", "overdue_time", "offline_time"
+                        "pk", "agent_id", "last_seen", "overdue_time", "offline_time"
                    )
                    if agent.status == "online"
                ]

                for agent in online:
-                    r = asyncio.run(agent.nats_cmd(nats_ping))
+                    r = asyncio.run(agent.nats_cmd(nats_ping, timeout=1))
                    if r == "pong":
                        running_agent = agent
                        break
@@ -333,27 +383,27 @@ class Agent(BaseAuditModel):

        updates = list()
        if patch_policy.critical == "approve":
-            updates += self.winupdates.filter(
+            updates += self.winupdates.filter(  # type: ignore
                severity="Critical", installed=False
            ).exclude(action="approve")

        if patch_policy.important == "approve":
-            updates += self.winupdates.filter(
+            updates += self.winupdates.filter(  # type: ignore
                severity="Important", installed=False
            ).exclude(action="approve")

        if patch_policy.moderate == "approve":
-            updates += self.winupdates.filter(
+            updates += self.winupdates.filter(  # type: ignore
                severity="Moderate", installed=False
            ).exclude(action="approve")

        if patch_policy.low == "approve":
-            updates += self.winupdates.filter(severity="Low", installed=False).exclude(
+            updates += self.winupdates.filter(severity="Low", installed=False).exclude(  # type: ignore
                action="approve"
            )

        if patch_policy.other == "approve":
-            updates += self.winupdates.filter(severity="", installed=False).exclude(
+            updates += self.winupdates.filter(severity="", installed=False).exclude(  # type: ignore
                action="approve"
            )

@@ -368,7 +418,7 @@ class Agent(BaseAuditModel):
        site = self.site
        core_settings = CoreSettings.objects.first()
        patch_policy = None
-        agent_policy = self.winupdatepolicy.get()
+        agent_policy = self.winupdatepolicy.get()  # type: ignore

        if self.monitoring_type == "server":
            # check agent policy first which should override client or site policy
@@ -377,21 +427,34 @@ class Agent(BaseAuditModel):

            # check site policy if agent policy doesn't have one
            elif site.server_policy and site.server_policy.winupdatepolicy.exists():
-                patch_policy = site.server_policy.winupdatepolicy.get()
+                # make sure agent isn't blocking policy inheritance
+                if not self.block_policy_inheritance:
+                    patch_policy = site.server_policy.winupdatepolicy.get()

            # if site doesn't have a patch policy check the client
            elif (
                site.client.server_policy
                and site.client.server_policy.winupdatepolicy.exists()
            ):
-                patch_policy = site.client.server_policy.winupdatepolicy.get()
+                # make sure agent and site are not blocking inheritance
+                if (
+                    not self.block_policy_inheritance
+                    and not site.block_policy_inheritance
+                ):
+                    patch_policy = site.client.server_policy.winupdatepolicy.get()

            # if patch policy still doesn't exist check default policy
            elif (
                core_settings.server_policy
                and core_settings.server_policy.winupdatepolicy.exists()
            ):
-                patch_policy = core_settings.server_policy.winupdatepolicy.get()
+                # make sure agent, site and client are not blocking inheritance
+                if (
+                    not self.block_policy_inheritance
+                    and not site.block_policy_inheritance
+                    and not site.client.block_policy_inheritance
+                ):
+                    patch_policy = core_settings.server_policy.winupdatepolicy.get()

        elif self.monitoring_type == "workstation":
            # check agent policy first which should override client or site policy
@@ -402,21 +465,36 @@ class Agent(BaseAuditModel):
                site.workstation_policy
                and site.workstation_policy.winupdatepolicy.exists()
            ):
-                patch_policy = site.workstation_policy.winupdatepolicy.get()
+                # make sure agent isn't blocking policy inheritance
+                if not self.block_policy_inheritance:
+                    patch_policy = site.workstation_policy.winupdatepolicy.get()

            # if site doesn't have a patch policy check the client
            elif (
                site.client.workstation_policy
                and site.client.workstation_policy.winupdatepolicy.exists()
            ):
-                patch_policy = site.client.workstation_policy.winupdatepolicy.get()
+                # make sure agent and site are not blocking inheritance
+                if (
+                    not self.block_policy_inheritance
+                    and not site.block_policy_inheritance
+                ):
+                    patch_policy = site.client.workstation_policy.winupdatepolicy.get()

            # if patch policy still doesn't exist check default policy
            elif (
                core_settings.workstation_policy
                and core_settings.workstation_policy.winupdatepolicy.exists()
            ):
-                patch_policy = core_settings.workstation_policy.winupdatepolicy.get()
+                # make sure agent, site and client are not blocking inheritance
+                if (
+                    not self.block_policy_inheritance
+                    and not site.block_policy_inheritance
+                    and not site.client.block_policy_inheritance
+                ):
+                    patch_policy = (
+                        core_settings.workstation_policy.winupdatepolicy.get()
+                    )

        # if policy still doesn't exist return the agent patch policy
        if not patch_policy:
@@ -453,16 +531,16 @@ class Agent(BaseAuditModel):

        return patch_policy

-    def get_approved_update_guids(self) -> List[str]:
+    def get_approved_update_guids(self) -> list[str]:
        return list(
-            self.winupdates.filter(action="approve", installed=False).values_list(
+            self.winupdates.filter(action="approve", installed=False).values_list(  # type: ignore
                "guid", flat=True
            )
        )

-    # returns alert template assigned in the following order: policy, site, client, global
-    # will return None if nothing is found
-    def get_alert_template(self) -> Union[AlertTemplate, None]:
+    # sets alert template assigned in the following order: policy, site, client, global
+    # sets None if nothing is found
+    def set_alert_template(self):

        site = self.site
        client = self.client
@@ -483,6 +561,7 @@ class Agent(BaseAuditModel):
            and site.server_policy
            and site.server_policy.alert_template
            and site.server_policy.alert_template.is_active
+            and not self.block_policy_inheritance
        ):
            templates.append(site.server_policy.alert_template)
        if (
@@ -490,6 +569,7 @@ class Agent(BaseAuditModel):
            and site.workstation_policy
            and site.workstation_policy.alert_template
            and site.workstation_policy.alert_template.is_active
+            and not self.block_policy_inheritance
        ):
            templates.append(site.workstation_policy.alert_template)

@@ -503,6 +583,8 @@ class Agent(BaseAuditModel):
            and client.server_policy
            and client.server_policy.alert_template
            and client.server_policy.alert_template.is_active
+            and not self.block_policy_inheritance
+            and not site.block_policy_inheritance
        ):
            templates.append(client.server_policy.alert_template)
        if (
@@ -510,15 +592,28 @@ class Agent(BaseAuditModel):
            and client.workstation_policy
            and client.workstation_policy.alert_template
            and client.workstation_policy.alert_template.is_active
+            and not self.block_policy_inheritance
+            and not site.block_policy_inheritance
        ):
            templates.append(client.workstation_policy.alert_template)

        # check if alert template is on client and return
-        if client.alert_template and client.alert_template.is_active:
+        if (
+            client.alert_template
+            and client.alert_template.is_active
+            and not self.block_policy_inheritance
+            and not site.block_policy_inheritance
+        ):
            templates.append(client.alert_template)

        # check if alert template is applied globally and return
-        if core.alert_template and core.alert_template.is_active:
+        if (
+            core.alert_template
+            and core.alert_template.is_active
+            and not self.block_policy_inheritance
+            and not site.block_policy_inheritance
+            and not client.block_policy_inheritance
+        ):
            templates.append(core.alert_template)

        # if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
@@ -527,6 +622,9 @@ class Agent(BaseAuditModel):
            and core.server_policy
            and core.server_policy.alert_template
            and core.server_policy.alert_template.is_active
+            and not self.block_policy_inheritance
+            and not site.block_policy_inheritance
+            and not client.block_policy_inheritance
        ):
            templates.append(core.server_policy.alert_template)
        if (
@@ -534,6 +632,9 @@ class Agent(BaseAuditModel):
            and core.workstation_policy
            and core.workstation_policy.alert_template
            and core.workstation_policy.alert_template.is_active
+            and not self.block_policy_inheritance
+            and not site.block_policy_inheritance
+            and not client.block_policy_inheritance
        ):
            templates.append(core.workstation_policy.alert_template)

@@ -562,16 +663,23 @@ class Agent(BaseAuditModel):
                continue

            else:
+                # save alert_template to agent cache field
+                self.alert_template = template
+                self.save()
+
                return template

        # no alert templates found or agent has been excluded
+        self.alert_template = None
+        self.save()
+
        return None

    def generate_checks_from_policies(self):
        from automation.models import Policy

        # Clear agent checks that have overriden_by_policy set
-        self.agentchecks.update(overriden_by_policy=False)
+        self.agentchecks.update(overriden_by_policy=False)  # type: ignore

        # Generate checks based on policies
        Policy.generate_policy_checks(self)
@@ -606,7 +714,7 @@ class Agent(BaseAuditModel):
        except Exception:
            return "err"

-    async def nats_cmd(self, data, timeout=30, wait=True):
+    async def nats_cmd(self, data: dict, timeout: int = 30, wait: bool = True):
        nc = NATS()
        options = {
            "servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
@@ -628,7 +736,11 @@ class Agent(BaseAuditModel):
            except ErrTimeout:
                ret = "timeout"
            else:
-                ret = msgpack.loads(msg.data)
+                try:
+                    ret = msgpack.loads(msg.data)  # type: ignore
+                except Exception as e:
+                    logger.error(e)
+                    ret = str(e)

            await nc.close()
            return ret
@@ -650,12 +762,12 @@ class Agent(BaseAuditModel):
    def delete_superseded_updates(self):
        try:
            pks = []  # list of pks to delete
-            kbs = list(self.winupdates.values_list("kb", flat=True))
+            kbs = list(self.winupdates.values_list("kb", flat=True))  # type: ignore
            d = Counter(kbs)
            dupes = [k for k, v in d.items() if v > 1]

            for dupe in dupes:
-                titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)
+                titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)  # type: ignore
                # extract the version from the title and sort from oldest to newest
                # skip if no version info is available therefore nothing to parse
                try:
@@ -668,194 +780,33 @@ class Agent(BaseAuditModel):
                    continue
                # append all but the latest version to our list of pks to delete
                for ver in sorted_vers[:-1]:
|
for ver in sorted_vers[:-1]:
|
||||||
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)
|
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver) # type: ignore
|
||||||
pks.append(q.first().pk)
|
pks.append(q.first().pk)
|
||||||
|
|
||||||
pks = list(set(pks))
|
pks = list(set(pks))
|
||||||
self.winupdates.filter(pk__in=pks).delete()
|
self.winupdates.filter(pk__in=pks).delete() # type: ignore
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# define how the agent should handle pending actions
|
def should_create_alert(self, alert_template=None):
|
||||||
def handle_pending_actions(self):
|
return (
|
||||||
pending_actions = self.pendingactions.filter(status="pending")
|
self.overdue_dashboard_alert
|
||||||
|
or self.overdue_email_alert
|
||||||
for action in pending_actions:
|
or self.overdue_text_alert
|
||||||
if action.action_type == "taskaction":
|
or (
|
||||||
from autotasks.tasks import (
|
|
||||||
create_win_task_schedule,
|
|
||||||
delete_win_task_schedule,
|
|
||||||
enable_or_disable_win_task,
|
|
||||||
)
|
|
||||||
|
|
||||||
task_id = action.details["task_id"]
|
|
||||||
|
|
||||||
if action.details["action"] == "taskcreate":
|
|
||||||
create_win_task_schedule.delay(task_id, pending_action=action.id)
|
|
||||||
elif action.details["action"] == "tasktoggle":
|
|
||||||
enable_or_disable_win_task.delay(
|
|
||||||
task_id, action.details["value"], pending_action=action.id
|
|
||||||
)
|
|
||||||
elif action.details["action"] == "taskdelete":
|
|
||||||
delete_win_task_schedule.delay(task_id, pending_action=action.id)
|
|
||||||
|
|
||||||
# for clearing duplicate pending actions on agent
|
|
||||||
def remove_matching_pending_task_actions(self, task_id):
|
|
||||||
# remove any other pending actions on agent with same task_id
|
|
||||||
for action in self.pendingactions.exclude(status="completed"):
|
|
||||||
if action.details["task_id"] == task_id:
|
|
||||||
action.delete()
|
|
||||||
|
|
||||||
def handle_alert(self, checkin: bool = False) -> None:
|
|
||||||
from agents.tasks import (
|
|
||||||
agent_outage_email_task,
|
|
||||||
agent_outage_sms_task,
|
|
||||||
agent_recovery_email_task,
|
|
||||||
agent_recovery_sms_task,
|
|
||||||
)
|
|
||||||
from alerts.models import Alert
|
|
||||||
|
|
||||||
# return if agent is in maintenace mode
|
|
||||||
if self.maintenance_mode:
|
|
||||||
return
|
|
||||||
|
|
||||||
alert_template = self.get_alert_template()
|
|
||||||
|
|
||||||
# called when agent is back online
|
|
||||||
if checkin:
|
|
||||||
if Alert.objects.filter(agent=self, resolved=False).exists():
|
|
||||||
|
|
||||||
# resolve alert if exists
|
|
||||||
alert = Alert.objects.get(agent=self, resolved=False)
|
|
||||||
alert.resolve()
|
|
||||||
|
|
||||||
# check if a resolved notification should be emailed
|
|
||||||
if (
|
|
||||||
not alert.resolved_email_sent
|
|
||||||
and alert_template
|
|
||||||
and alert_template.agent_email_on_resolved
|
|
||||||
or self.overdue_email_alert
|
|
||||||
):
|
|
||||||
agent_recovery_email_task.delay(pk=alert.pk)
|
|
||||||
|
|
||||||
# check if a resolved notification should be texted
|
|
||||||
if (
|
|
||||||
not alert.resolved_sms_sent
|
|
||||||
and alert_template
|
|
||||||
and alert_template.agent_text_on_resolved
|
|
||||||
or self.overdue_text_alert
|
|
||||||
):
|
|
||||||
agent_recovery_sms_task.delay(pk=alert.pk)
|
|
||||||
|
|
||||||
# check if any scripts should be run
|
|
||||||
if (
|
|
||||||
not alert.resolved_action_run
|
|
||||||
and alert_template
|
|
||||||
and alert_template.resolved_action
|
|
||||||
):
|
|
||||||
r = self.run_script(
|
|
||||||
scriptpk=alert_template.resolved_action.pk,
|
|
||||||
args=alert_template.resolved_action_args,
|
|
||||||
timeout=alert_template.resolved_action_timeout,
|
|
||||||
wait=True,
|
|
||||||
full=True,
|
|
||||||
run_on_any=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# command was successful
|
|
||||||
if type(r) == dict:
|
|
||||||
alert.resolved_action_retcode = r["retcode"]
|
|
||||||
alert.resolved_action_stdout = r["stdout"]
|
|
||||||
alert.resolved_action_stderr = r["stderr"]
|
|
||||||
alert.resolved_action_execution_time = "{:.4f}".format(
|
|
||||||
r["execution_time"]
|
|
||||||
)
|
|
||||||
alert.resolved_action_run = djangotime.now()
|
|
||||||
alert.save()
|
|
||||||
else:
|
|
||||||
logger.error(
|
|
||||||
f"Resolved action: {alert_template.resolved_action} failed to run on any agent for {self.hostname} resolved outage"
|
|
||||||
)
|
|
||||||
|
|
||||||
# called when agent is offline
|
|
||||||
else:
|
|
||||||
# check if alert hasn't been created yet so create it
|
|
||||||
if not Alert.objects.filter(agent=self, resolved=False).exists():
|
|
||||||
|
|
||||||
alert = Alert.create_availability_alert(self)
|
|
||||||
|
|
||||||
# add a null check history to allow gaps in graph
|
|
||||||
for check in self.agentchecks.all():
|
|
||||||
check.add_check_history(None)
|
|
||||||
else:
|
|
||||||
alert = Alert.objects.get(agent=self, resolved=False)
|
|
||||||
|
|
||||||
# create dashboard alert if enabled
|
|
||||||
if (
|
|
||||||
alert_template
|
alert_template
|
||||||
and alert_template.agent_always_alert
|
and (
|
||||||
or self.overdue_dashboard_alert
|
alert_template.agent_always_alert
|
||||||
):
|
or alert_template.agent_always_email
|
||||||
alert.hidden = False
|
or alert_template.agent_always_text
|
||||||
alert.save()
|
|
||||||
|
|
||||||
# send email alert if enabled
|
|
||||||
if (
|
|
||||||
not alert.email_sent
|
|
||||||
and alert_template
|
|
||||||
and alert_template.agent_always_email
|
|
||||||
or self.overdue_email_alert
|
|
||||||
):
|
|
||||||
agent_outage_email_task.delay(
|
|
||||||
pk=alert.pk,
|
|
||||||
alert_interval=alert_template.check_periodic_alert_days
|
|
||||||
if alert_template
|
|
||||||
else None,
|
|
||||||
)
|
)
|
||||||
|
)
|
||||||
# send text message if enabled
|
)
|
||||||
if (
|
|
||||||
not alert.sms_sent
|
|
||||||
and alert_template
|
|
||||||
and alert_template.agent_always_text
|
|
||||||
or self.overdue_text_alert
|
|
||||||
):
|
|
||||||
agent_outage_sms_task.delay(
|
|
||||||
pk=alert.pk,
|
|
||||||
alert_interval=alert_template.check_periodic_alert_days
|
|
||||||
if alert_template
|
|
||||||
else None,
|
|
||||||
)
|
|
||||||
|
|
||||||
# check if any scripts should be run
|
|
||||||
if not alert.action_run and alert_template and alert_template.action:
|
|
||||||
r = self.run_script(
|
|
||||||
scriptpk=alert_template.action.pk,
|
|
||||||
args=alert_template.action_args,
|
|
||||||
timeout=alert_template.action_timeout,
|
|
||||||
wait=True,
|
|
||||||
full=True,
|
|
||||||
run_on_any=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# command was successful
|
|
||||||
if isinstance(r, dict):
|
|
||||||
alert.action_retcode = r["retcode"]
|
|
||||||
alert.action_stdout = r["stdout"]
|
|
||||||
alert.action_stderr = r["stderr"]
|
|
||||||
alert.action_execution_time = "{:.4f}".format(r["execution_time"])
|
|
||||||
alert.action_run = djangotime.now()
|
|
||||||
alert.save()
|
|
||||||
else:
|
|
||||||
logger.error(
|
|
||||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {self.hostname} outage"
|
|
||||||
)
|
|
||||||
|
|
||||||
def send_outage_email(self):
|
def send_outage_email(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
alert_template = self.get_alert_template()
|
|
||||||
CORE.send_mail(
|
CORE.send_mail(
|
||||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||||
(
|
(
|
||||||
@@ -864,14 +815,13 @@ class Agent(BaseAuditModel):
|
|||||||
f"agent {self.hostname} "
|
f"agent {self.hostname} "
|
||||||
"within the expected time."
|
"within the expected time."
|
||||||
),
|
),
|
||||||
alert_template=alert_template,
|
alert_template=self.alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
def send_recovery_email(self):
|
def send_recovery_email(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
alert_template = self.get_alert_template()
|
|
||||||
CORE.send_mail(
|
CORE.send_mail(
|
||||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||||
(
|
(
|
||||||
@@ -880,27 +830,25 @@ class Agent(BaseAuditModel):
|
|||||||
f"agent {self.hostname} "
|
f"agent {self.hostname} "
|
||||||
"after an interruption in data transmission."
|
"after an interruption in data transmission."
|
||||||
),
|
),
|
||||||
alert_template=alert_template,
|
alert_template=self.alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
def send_outage_sms(self):
|
def send_outage_sms(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
alert_template = self.get_alert_template()
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
CORE.send_sms(
|
CORE.send_sms(
|
||||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||||
alert_template=alert_template,
|
alert_template=self.alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
def send_recovery_sms(self):
|
def send_recovery_sms(self):
|
||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
alert_template = self.get_alert_template()
|
|
||||||
CORE.send_sms(
|
CORE.send_sms(
|
||||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||||
alert_template=alert_template,
|
alert_template=self.alert_template,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -926,12 +874,6 @@ class RecoveryAction(models.Model):
|
|||||||
def __str__(self):
|
def __str__(self):
|
||||||
return f"{self.agent.hostname} - {self.mode}"
|
return f"{self.agent.hostname} - {self.mode}"
|
||||||
|
|
||||||
def send(self):
|
|
||||||
ret = {"recovery": self.mode}
|
|
||||||
if self.mode == "command":
|
|
||||||
ret["cmd"] = self.command
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class Note(models.Model):
|
class Note(models.Model):
|
||||||
agent = models.ForeignKey(
|
agent = models.ForeignKey(
|
||||||
@@ -951,3 +893,38 @@ class Note(models.Model):
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.agent.hostname
|
return self.agent.hostname
|
||||||
|
|
||||||
|
|
||||||
|
class AgentCustomField(models.Model):
|
||||||
|
agent = models.ForeignKey(
|
||||||
|
Agent,
|
||||||
|
related_name="custom_fields",
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
)
|
||||||
|
|
||||||
|
field = models.ForeignKey(
|
||||||
|
"core.CustomField",
|
||||||
|
related_name="agent_fields",
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
)
|
||||||
|
|
||||||
|
string_value = models.TextField(null=True, blank=True)
|
||||||
|
bool_value = models.BooleanField(blank=True, default=False)
|
||||||
|
multiple_value = ArrayField(
|
||||||
|
models.TextField(null=True, blank=True),
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
default=list,
|
||||||
|
)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.field
|
||||||
|
|
||||||
|
@property
|
||||||
|
def value(self):
|
||||||
|
if self.field.type == "multiple":
|
||||||
|
return self.multiple_value
|
||||||
|
elif self.field.type == "checkbox":
|
||||||
|
return self.bool_value
|
||||||
|
else:
|
||||||
|
return self.string_value
|
||||||
|
|||||||
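For orientation, a minimal sketch (not part of the commit above) of how the new AgentCustomField model's value property resolves by field type; the CustomField import path and object lookups below are illustrative assumptions.

# Illustrative sketch only -- not part of the diff above.
from agents.models import Agent, AgentCustomField
from core.models import CustomField  # assumed location of the CustomField model

agent = Agent.objects.first()
field = CustomField.objects.filter(model="agent", type="checkbox").first()

cf, _ = AgentCustomField.objects.get_or_create(
    agent=agent, field=field, defaults={"bool_value": True}
)
print(cf.value)  # returns bool_value because field.type == "checkbox"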
api/tacticalrmm/agents/permissions.py (new file, 63 lines)
@@ -0,0 +1,63 @@
+ from rest_framework import permissions
+
+ from tacticalrmm.permissions import _has_perm
+
+
+ class MeshPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_use_mesh")
+
+
+ class UninstallPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_uninstall_agents")
+
+
+ class UpdateAgentPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_update_agents")
+
+
+ class EditAgentPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_edit_agent")
+
+
+ class ManageProcPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_manage_procs")
+
+
+ class EvtLogPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_view_eventlogs")
+
+
+ class SendCMDPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_send_cmd")
+
+
+ class RebootAgentPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_reboot_agents")
+
+
+ class InstallAgentPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_install_agents")
+
+
+ class RunScriptPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_run_scripts")
+
+
+ class ManageNotesPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_manage_notes")
+
+
+ class RunBulkPerms(permissions.BasePermission):
+ def has_permission(self, r, view):
+ return _has_perm(r, "can_run_bulk")
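A minimal sketch of how one of these permission classes is typically attached to a DRF view; the view class below is hypothetical and not part of this changeset.

# Hypothetical view for illustration -- only MeshPerms comes from the new file.
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from agents.permissions import MeshPerms


class ExampleMeshView(APIView):
    # DRF calls MeshPerms.has_permission(request, view), which defers to
    # _has_perm(request, "can_use_mesh")
    permission_classes = [IsAuthenticated, MeshPerms]

    def get(self, request):
        return Response("ok")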
@@ -4,7 +4,7 @@ from rest_framework import serializers
from clients.serializers import ClientSerializer
from winupdate.serializers import WinUpdatePolicySerializer

- from .models import Agent, Note
+ from .models import Agent, AgentCustomField, Note


class AgentSerializer(serializers.ModelSerializer):
@@ -16,6 +16,7 @@ class AgentSerializer(serializers.ModelSerializer):
local_ips = serializers.ReadOnlyField()
make_model = serializers.ReadOnlyField()
physical_disks = serializers.ReadOnlyField()
+ graphics = serializers.ReadOnlyField()
checks = serializers.ReadOnlyField()
timezone = serializers.ReadOnlyField()
all_timezones = serializers.SerializerMethodField()
@@ -57,16 +58,15 @@ class AgentTableSerializer(serializers.ModelSerializer):
alert_template = serializers.SerializerMethodField()

def get_alert_template(self, obj):
- alert_template = obj.get_alert_template()

- if not alert_template:
+ if not obj.alert_template:
return None
else:
return {
- "name": alert_template.name,
+ "name": obj.alert_template.name,
- "always_email": alert_template.agent_always_email,
+ "always_email": obj.alert_template.agent_always_email,
- "always_text": alert_template.agent_always_text,
+ "always_text": obj.alert_template.agent_always_text,
- "always_alert": alert_template.agent_always_alert,
+ "always_alert": obj.alert_template.agent_always_alert,
}

def get_pending_actions(self, obj):
@@ -116,14 +116,35 @@ class AgentTableSerializer(serializers.ModelSerializer):
"logged_username",
"italic",
"policy",
+ "block_policy_inheritance",
]
depth = 2


+ class AgentCustomFieldSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = AgentCustomField
+ fields = (
+ "id",
+ "field",
+ "agent",
+ "value",
+ "string_value",
+ "bool_value",
+ "multiple_value",
+ )
+ extra_kwargs = {
+ "string_value": {"write_only": True},
+ "bool_value": {"write_only": True},
+ "multiple_value": {"write_only": True},
+ }


class AgentEditSerializer(serializers.ModelSerializer):
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
all_timezones = serializers.SerializerMethodField()
client = ClientSerializer(read_only=True)
+ custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)

def get_all_timezones(self, obj):
return pytz.all_timezones
@@ -147,6 +168,7 @@ class AgentEditSerializer(serializers.ModelSerializer):
"all_timezones",
"winupdatepolicy",
"policy",
+ "custom_fields",
]

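A short note on the write_only extra_kwargs above, with an assumed usage sketch: the raw *_value columns are accepted on input, while serialized output exposes only the computed value property.

# Illustrative sketch only; assumes at least one AgentCustomField exists.
from agents.models import AgentCustomField
from agents.serializers import AgentCustomFieldSerializer

cf = AgentCustomField.objects.first()
AgentCustomFieldSerializer(cf).data
# -> {"id": ..., "field": ..., "agent": ..., "value": ...}
# string_value / bool_value / multiple_value are write_only and omitted here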
@@ -1,8 +1,9 @@
import asyncio
import datetime as dt
import random
+ import urllib.parse
from time import sleep
- from typing import List, Union
+ from typing import Union

from django.conf import settings
from django.utils import timezone as djangotime
@@ -10,21 +11,21 @@ from loguru import logger
from packaging import version as pyver

from agents.models import Agent
- from core.models import CoreSettings
+ from core.models import CodeSignToken, CoreSettings
from logs.models import PendingAction
from scripts.models import Script
from tacticalrmm.celery import app
+ from tacticalrmm.utils import run_nats_api_cmd

logger.configure(**settings.LOG_CONFIG)


- def agent_update(pk: int) -> str:
+ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str:
+ from agents.utils import get_exegen_url

agent = Agent.objects.get(pk=pk)

- if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
+ if pyver.parse(agent.version) <= pyver.parse("1.3.0"):
- logger.warning(
- f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to auto update."
- )
return "not supported"

# skip if we can't determine the arch
@@ -34,35 +35,33 @@ def agent_update(pk: int) -> str:
)
return "noarch"

- # removed sqlite in 1.4.0 to get rid of cgo dependency
+ version = settings.LATEST_AGENT_VER
- # 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
+ inno = agent.win_inno_exe
- if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
- version = settings.LATEST_AGENT_VER
+ if codesigntoken is not None and pyver.parse(version) >= pyver.parse("1.5.0"):
- url = agent.winagent_dl
+ base_url = get_exegen_url() + "/api/v1/winagents/?"
- inno = agent.win_inno_exe
+ params = {"version": version, "arch": agent.arch, "token": codesigntoken}
+ url = base_url + urllib.parse.urlencode(params)
else:
- version = "1.3.0"
+ url = agent.winagent_dl
- inno = (
- "winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
- )
- url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"

- if agent.pendingactions.filter(
+ if not force:
- action_type="agentupdate", status="pending"
+ if agent.pendingactions.filter(
- ).exists():
- agent.pendingactions.filter(
action_type="agentupdate", status="pending"
- ).delete()
+ ).exists():
+ agent.pendingactions.filter(
+ action_type="agentupdate", status="pending"
+ ).delete()

PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
details={
"url": url,
"version": version,
"inno": inno,
},
)

nats_data = {
"func": "agentupdate",
@@ -77,11 +76,31 @@ def agent_update(pk: int) -> str:


@app.task
- def send_agent_update_task(pks: List[int]) -> None:
+ def force_code_sign(pks: list[int]) -> None:
+ try:
+ token = CodeSignToken.objects.first().token
+ except:
+ return

+ chunks = (pks[i : i + 50] for i in range(0, len(pks), 50))
+ for chunk in chunks:
+ for pk in chunk:
+ agent_update(pk=pk, codesigntoken=token, force=True)
+ sleep(0.05)
+ sleep(4)


+ @app.task
+ def send_agent_update_task(pks: list[int]) -> None:
+ try:
+ codesigntoken = CodeSignToken.objects.first().token
+ except:
+ codesigntoken = None

chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
for chunk in chunks:
for pk in chunk:
- agent_update(pk)
+ agent_update(pk, codesigntoken)
sleep(0.05)
sleep(4)

@@ -92,8 +111,13 @@ def auto_self_agent_update_task() -> None:
if not core.agent_auto_update:
return

+ try:
+ codesigntoken = CodeSignToken.objects.first().token
+ except:
+ codesigntoken = None

q = Agent.objects.only("pk", "version")
- pks: List[int] = [
+ pks: list[int] = [
i.pk
for i in q
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
@@ -102,7 +126,7 @@ def auto_self_agent_update_task() -> None:
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
for chunk in chunks:
for pk in chunk:
- agent_update(pk)
+ agent_update(pk, codesigntoken)
sleep(0.05)
sleep(4)

@@ -183,8 +207,11 @@ def agent_recovery_sms_task(pk: int) -> str:

@app.task
def agent_outages_task() -> None:
+ from alerts.models import Alert

agents = Agent.objects.only(
"pk",
+ "agent_id",
"last_seen",
"offline_time",
"overdue_time",
@@ -195,30 +222,22 @@ def agent_outages_task() -> None:

for agent in agents:
if agent.status == "overdue":
- agent.handle_alert()
+ Alert.handle_alert_failure(agent)


- @app.task
- def handle_agent_recovery_task(pk: int) -> None:
- sleep(10)
- from agents.models import RecoveryAction

- action = RecoveryAction.objects.get(pk=pk)
- if action.mode == "command":
- data = {"func": "recoverycmd", "recoverycommand": action.command}
- else:
- data = {"func": "recover", "payload": {"mode": action.mode}}

- asyncio.run(action.agent.nats_cmd(data, wait=False))


@app.task
def run_script_email_results_task(
- agentpk: int, scriptpk: int, nats_timeout: int, emails: List[str]
+ agentpk: int,
+ scriptpk: int,
+ nats_timeout: int,
+ emails: list[str],
+ args: list[str] = [],
):
agent = Agent.objects.get(pk=agentpk)
script = Script.objects.get(pk=scriptpk)
- r = agent.run_script(scriptpk=script.pk, full=True, timeout=nats_timeout, wait=True)
+ r = agent.run_script(
+ scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
+ )
if r == "timeout":
logger.error(f"{agent.hostname} timed out running script.")
return
@@ -258,3 +277,49 @@ def run_script_email_results_task(
server.quit()
except Exception as e:
logger.error(e)


+ @app.task
+ def clear_faults_task(older_than_days: int) -> None:
+ # https://github.com/wh1te909/tacticalrmm/issues/484
+ agents = Agent.objects.exclude(last_seen__isnull=True).filter(
+ last_seen__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
+ )
+ for agent in agents:
+ if agent.agentchecks.exists():
+ for check in agent.agentchecks.all():
+ # reset check status
+ check.status = "passing"
+ check.save(update_fields=["status"])
+ if check.alert.filter(resolved=False).exists():
+ check.alert.get(resolved=False).resolve()

+ # reset overdue alerts
+ agent.overdue_email_alert = False
+ agent.overdue_text_alert = False
+ agent.overdue_dashboard_alert = False
+ agent.save(
+ update_fields=[
+ "overdue_email_alert",
+ "overdue_text_alert",
+ "overdue_dashboard_alert",
+ ]
+ )


+ @app.task
+ def monitor_agents_task() -> None:
+ agents = Agent.objects.only(
+ "pk", "agent_id", "last_seen", "overdue_time", "offline_time"
+ )
+ ids = [i.agent_id for i in agents if i.status != "online"]
+ run_nats_api_cmd("monitor", ids)


+ @app.task
+ def get_wmi_task() -> None:
+ agents = Agent.objects.only(
+ "pk", "agent_id", "last_seen", "overdue_time", "offline_time"
+ )
+ ids = [i.agent_id for i in agents if i.status == "online"]
+ run_nats_api_cmd("wmi", ids)
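The update tasks above share a chunk-and-sleep dispatch pattern; a minimal standalone sketch, with the callback left as a placeholder:

# Illustrative sketch of the chunking used by send_agent_update_task / force_code_sign.
from time import sleep
from typing import Callable


def dispatch_in_chunks(pks: list[int], fn: Callable[[int], None], size: int = 30) -> None:
    chunks = (pks[i : i + size] for i in range(0, len(pks), size))
    for chunk in chunks:
        for pk in chunk:
            fn(pk)  # e.g. agent_update(pk, codesigntoken)
            sleep(0.05)  # brief pause between agents
        sleep(4)  # longer pause between chunks to spread the load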
@@ -1,7 +1,6 @@
import json
import os
from itertools import cycle
- from typing import List
from unittest.mock import patch

from django.conf import settings
@@ -13,11 +12,68 @@ from tacticalrmm.test import TacticalTestCase
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer

- from .models import Agent
+ from .models import Agent, AgentCustomField
from .serializers import AgentSerializer
from .tasks import auto_self_agent_update_task


+ class TestAgentsList(TacticalTestCase):
+ def setUp(self):
+ self.authenticate()
+ self.setup_coresettings()

+ def test_agents_list(self):
+ url = "/agents/listagents/"

+ # 36 total agents
+ company1 = baker.make("clients.Client")
+ company2 = baker.make("clients.Client")
+ site1 = baker.make("clients.Site", client=company1)
+ site2 = baker.make("clients.Site", client=company1)
+ site3 = baker.make("clients.Site", client=company2)

+ baker.make_recipe(
+ "agents.online_agent", site=site1, monitoring_type="server", _quantity=15
+ )
+ baker.make_recipe(
+ "agents.online_agent",
+ site=site2,
+ monitoring_type="workstation",
+ _quantity=10,
+ )
+ baker.make_recipe(
+ "agents.online_agent",
+ site=site3,
+ monitoring_type="server",
+ _quantity=4,
+ )
+ baker.make_recipe(
+ "agents.online_agent",
+ site=site3,
+ monitoring_type="workstation",
+ _quantity=7,
+ )

+ # test all agents
+ r = self.client.patch(url, format="json")
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(len(r.data), 36)  # type: ignore

+ # test client1
+ data = {"clientPK": company1.pk}  # type: ignore
+ r = self.client.patch(url, data, format="json")
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(len(r.data), 25)  # type: ignore

+ # test site3
+ data = {"sitePK": site3.pk}  # type: ignore
+ r = self.client.patch(url, data, format="json")
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(len(r.data), 11)  # type: ignore

+ self.check_not_authenticated("patch", url)


class TestAgentViews(TacticalTestCase):
def setUp(self):
self.authenticate()
@@ -78,12 +134,12 @@ class TestAgentViews(TacticalTestCase):
_quantity=15,
)

- pks: List[int] = list(
+ pks: list[int] = list(
Agent.objects.only("pk", "version").values_list("pk", flat=True)
)

data = {"pks": pks}
- expected: List[int] = [
+ expected: list[int] = [
i.pk
for i in Agent.objects.only("pk", "version")
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
@@ -96,8 +152,9 @@ class TestAgentViews(TacticalTestCase):

self.check_not_authenticated("post", url)

+ @patch("time.sleep")
@patch("agents.models.Agent.nats_cmd")
- def test_ping(self, nats_cmd):
+ def test_ping(self, nats_cmd, mock_sleep):
url = f"/agents/{self.agent.pk}/ping/"

nats_cmd.return_value = "timeout"
@@ -142,11 +199,6 @@ class TestAgentViews(TacticalTestCase):

@patch("agents.models.Agent.nats_cmd")
def test_get_processes(self, mock_ret):
- agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
- url_old = f"/agents/{agent_old.pk}/getprocs/"
- r = self.client.get(url_old)
- self.assertEqual(r.status_code, 400)

agent = baker.make_recipe("agents.online_agent", version="1.2.0")
url = f"/agents/{agent.pk}/getprocs/"

@@ -257,7 +309,7 @@ class TestAgentViews(TacticalTestCase):
mock_ret.return_value = "nt authority\system"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
- self.assertIsInstance(r.data, str)
+ self.assertIsInstance(r.data, str)  # type: ignore

mock_ret.return_value = "timeout"
r = self.client.post(url, data, format="json")
@@ -277,15 +329,16 @@ class TestAgentViews(TacticalTestCase):
nats_cmd.return_value = "ok"
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)
- self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
+ self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")  # type: ignore
- self.assertEqual(r.data["agent"], self.agent.hostname)
+ self.assertEqual(r.data["agent"], self.agent.hostname)  # type: ignore

nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "schedreboot",
+ "deleteafter": True,
"trigger": "once",
- "name": r.data["task_name"],
+ "name": r.data["task_name"],  # type: ignore
"year": 2025,
"month": "August",
"day": 29,
@@ -306,53 +359,43 @@ class TestAgentViews(TacticalTestCase):
r = self.client.patch(url, data_invalid, format="json")

self.assertEqual(r.status_code, 400)
- self.assertEqual(r.data, "Invalid date")
+ self.assertEqual(r.data, "Invalid date")  # type: ignore

self.check_not_authenticated("patch", url)

@patch("os.path.exists")
- @patch("subprocess.run")
+ def test_install_agent(self, mock_file_exists):
- def test_install_agent(self, mock_subprocess, mock_file_exists):
+ url = "/agents/installagent/"
- url = f"/agents/installagent/"

site = baker.make("clients.Site")
data = {
- "client": site.client.id,
+ "client": site.client.id,  # type: ignore
- "site": site.id,
+ "site": site.id,  # type: ignore
"arch": "64",
"expires": 23,
- "installMethod": "exe",
+ "installMethod": "manual",
"api": "https://api.example.com",
"agenttype": "server",
"rdp": 1,
"ping": 0,
"power": 0,
+ "fileName": "rmm-client-site-server.exe",
}

mock_file_exists.return_value = False
- mock_subprocess.return_value.returncode = 0
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 406)

mock_file_exists.return_value = True
- mock_subprocess.return_value.returncode = 1
- r = self.client.post(url, data, format="json")
- self.assertEqual(r.status_code, 413)

- mock_file_exists.return_value = True
- mock_subprocess.return_value.returncode = 0
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)

data["arch"] = "32"
- mock_subprocess.return_value.returncode = 0
mock_file_exists.return_value = False
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 415)

- data["installMethod"] = "manual"
data["arch"] = "64"
- mock_subprocess.return_value.returncode = 0
mock_file_exists.return_value = True
r = self.client.post(url, data, format="json")
self.assertIn("rdp", r.json()["cmd"])
@@ -363,52 +406,74 @@ class TestAgentViews(TacticalTestCase):
self.assertIn("power", r.json()["cmd"])
self.assertIn("ping", r.json()["cmd"])

+ data["installMethod"] = "powershell"
+ self.assertEqual(r.status_code, 200)

self.check_not_authenticated("post", url)

- def test_recover(self):
+ @patch("agents.models.Agent.nats_cmd")
+ def test_recover(self, nats_cmd):
from agents.models import RecoveryAction

- self.agent.version = "0.11.1"
+ RecoveryAction.objects.all().delete()
- self.agent.save(update_fields=["version"])
url = "/agents/recover/"
- data = {"pk": self.agent.pk, "cmd": None, "mode": "mesh"}
+ agent = baker.make_recipe("agents.online_agent")

+ # test mesh realtime
+ data = {"pk": agent.pk, "cmd": None, "mode": "mesh"}
+ nats_cmd.return_value = "ok"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
+ self.assertEqual(RecoveryAction.objects.count(), 0)
+ nats_cmd.assert_called_with(
+ {"func": "recover", "payload": {"mode": "mesh"}}, timeout=10
+ )
+ nats_cmd.reset_mock()

- data["mode"] = "mesh"
+ # test mesh with agent rpc not working
- r = self.client.post(url, data, format="json")
+ data = {"pk": agent.pk, "cmd": None, "mode": "mesh"}
- self.assertEqual(r.status_code, 400)
+ nats_cmd.return_value = "timeout"
- self.assertIn("pending", r.json())

- RecoveryAction.objects.all().delete()
- data["mode"] = "command"
- data["cmd"] = "ipconfig /flushdns"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
+ self.assertEqual(RecoveryAction.objects.count(), 1)
- RecoveryAction.objects.all().delete()
+ mesh_recovery = RecoveryAction.objects.first()
- data["cmd"] = None
+ self.assertEqual(mesh_recovery.mode, "mesh")
- r = self.client.post(url, data, format="json")
+ nats_cmd.reset_mock()
- self.assertEqual(r.status_code, 400)

RecoveryAction.objects.all().delete()

- self.agent.version = "0.9.4"
+ # test tacagent realtime
- self.agent.save(update_fields=["version"])
+ data = {"pk": agent.pk, "cmd": None, "mode": "tacagent"}
- data["mode"] = "mesh"
+ nats_cmd.return_value = "ok"
r = self.client.post(url, data, format="json")
- self.assertEqual(r.status_code, 400)
- self.assertIn("0.9.5", r.json())

- self.check_not_authenticated("post", url)

- def test_agents_list(self):
- url = "/agents/listagents/"

- r = self.client.get(url)
self.assertEqual(r.status_code, 200)
+ self.assertEqual(RecoveryAction.objects.count(), 0)
+ nats_cmd.assert_called_with(
+ {"func": "recover", "payload": {"mode": "tacagent"}}, timeout=10
+ )
+ nats_cmd.reset_mock()

- self.check_not_authenticated("get", url)
+ # test tacagent with rpc not working
+ data = {"pk": agent.pk, "cmd": None, "mode": "tacagent"}
+ nats_cmd.return_value = "timeout"
+ r = self.client.post(url, data, format="json")
+ self.assertEqual(r.status_code, 400)
+ self.assertEqual(RecoveryAction.objects.count(), 0)
+ nats_cmd.reset_mock()

+ # test shell cmd without command
+ data = {"pk": agent.pk, "cmd": None, "mode": "command"}
+ r = self.client.post(url, data, format="json")
+ self.assertEqual(r.status_code, 400)
+ self.assertEqual(RecoveryAction.objects.count(), 0)

+ # test shell cmd
+ data = {"pk": agent.pk, "cmd": "shutdown /r /t 10 /f", "mode": "command"}
+ r = self.client.post(url, data, format="json")
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(RecoveryAction.objects.count(), 1)
+ cmd_recovery = RecoveryAction.objects.first()
+ self.assertEqual(cmd_recovery.mode, "command")
+ self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")

def test_agents_agent_detail(self):
url = f"/agents/{self.agent.pk}/agentdetail/"
@@ -426,7 +491,7 @@ class TestAgentViews(TacticalTestCase):

edit = {
"id": self.agent.pk,
- "site": site.id,
+ "site": site.id,  # type: ignore
"monitoring_type": "workstation",
"description": "asjdk234andasd",
"offline_time": 4,
@@ -457,12 +522,41 @@ class TestAgentViews(TacticalTestCase):

agent = Agent.objects.get(pk=self.agent.pk)
data = AgentSerializer(agent).data
- self.assertEqual(data["site"], site.id)
+ self.assertEqual(data["site"], site.id)  # type: ignore

policy = WinUpdatePolicy.objects.get(agent=self.agent)
data = WinUpdatePolicySerializer(policy).data
self.assertEqual(data["run_time_days"], [2, 3, 6])

+ # test adding custom fields
+ field = baker.make("core.CustomField", model="agent", type="number")
+ edit = {
+ "id": self.agent.pk,
+ "site": site.id,  # type: ignore
+ "description": "asjdk234andasd",
+ "custom_fields": [{"field": field.id, "string_value": "123"}],  # type: ignore
+ }

+ r = self.client.patch(url, edit, format="json")
+ self.assertEqual(r.status_code, 200)
+ self.assertTrue(
+ AgentCustomField.objects.filter(agent=self.agent, field=field).exists()
+ )

+ # test edit custom field
+ edit = {
+ "id": self.agent.pk,
+ "site": site.id,  # type: ignore
+ "description": "asjdk234andasd",
+ "custom_fields": [{"field": field.id, "string_value": "456"}],  # type: ignore
+ }

+ r = self.client.patch(url, edit, format="json")
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(
+ AgentCustomField.objects.get(agent=agent, field=field).value,
+ "456",
+ )
self.check_not_authenticated("patch", url)

@patch("agents.models.Agent.get_login_token")
@@ -475,21 +569,21 @@ class TestAgentViews(TacticalTestCase):
# TODO
# decode the cookie

- self.assertIn("&viewmode=13", r.data["file"])
+ self.assertIn("&viewmode=13", r.data["file"])  # type: ignore
- self.assertIn("&viewmode=12", r.data["terminal"])
+ self.assertIn("&viewmode=12", r.data["terminal"])  # type: ignore
- self.assertIn("&viewmode=11", r.data["control"])
+ self.assertIn("&viewmode=11", r.data["control"])  # type: ignore

- self.assertIn("&gotonode=", r.data["file"])
+ self.assertIn("&gotonode=", r.data["file"])  # type: ignore
- self.assertIn("&gotonode=", r.data["terminal"])
+ self.assertIn("&gotonode=", r.data["terminal"])  # type: ignore
- self.assertIn("&gotonode=", r.data["control"])
+ self.assertIn("&gotonode=", r.data["control"])  # type: ignore

- self.assertIn("?login=", r.data["file"])
+ self.assertIn("?login=", r.data["file"])  # type: ignore
- self.assertIn("?login=", r.data["terminal"])
+ self.assertIn("?login=", r.data["terminal"])  # type: ignore
- self.assertIn("?login=", r.data["control"])
+ self.assertIn("?login=", r.data["control"])  # type: ignore

- self.assertEqual(self.agent.hostname, r.data["hostname"])
+ self.assertEqual(self.agent.hostname, r.data["hostname"])  # type: ignore
- self.assertEqual(self.agent.client.name, r.data["client"])
+ self.assertEqual(self.agent.client.name, r.data["client"])  # type: ignore
- self.assertEqual(self.agent.site.name, r.data["site"])
+ self.assertEqual(self.agent.site.name, r.data["site"])  # type: ignore

self.assertEqual(r.status_code, 200)

@@ -499,32 +593,6 @@ class TestAgentViews(TacticalTestCase):

self.check_not_authenticated("get", url)

- def test_by_client(self):
- url = f"/agents/byclient/{self.agent.client.id}/"

- r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- self.assertTrue(r.data)

- url = f"/agents/byclient/500/"
- r = self.client.get(url)
- self.assertFalse(r.data)  # returns empty list

- self.check_not_authenticated("get", url)

- def test_by_site(self):
- url = f"/agents/bysite/{self.agent.site.id}/"

- r = self.client.get(url)
- self.assertEqual(r.status_code, 200)
- self.assertTrue(r.data)

- url = f"/agents/bysite/500/"
- r = self.client.get(url)
- self.assertEqual(r.data, [])

- self.check_not_authenticated("get", url)

def test_overdue_action(self):
url = "/agents/overdueaction/"

@@ -533,14 +601,14 @@ class TestAgentViews(TacticalTestCase):
self.assertEqual(r.status_code, 200)
agent = Agent.objects.get(pk=self.agent.pk)
self.assertTrue(agent.overdue_email_alert)
- self.assertEqual(self.agent.hostname, r.data)
+ self.assertEqual(self.agent.hostname, r.data)  # type: ignore

payload = {"pk": self.agent.pk, "overdue_text_alert": False}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
agent = Agent.objects.get(pk=self.agent.pk)
self.assertFalse(agent.overdue_text_alert)
- self.assertEqual(self.agent.hostname, r.data)
+ self.assertEqual(self.agent.hostname, r.data)  # type: ignore

self.check_not_authenticated("post", url)

@@ -684,9 +752,9 @@ class TestAgentViews(TacticalTestCase):
nats_cmd.return_value = "ok"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
- self.assertIn(self.agent.hostname, r.data)
+ self.assertIn(self.agent.hostname, r.data)  # type: ignore
nats_cmd.assert_called_with(
- {"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
+ {"func": "recover", "payload": {"mode": "mesh"}}, timeout=90
)

nats_cmd.return_value = "timeout"
@@ -699,13 +767,84 @@ class TestAgentViews(TacticalTestCase):

self.check_not_authenticated("get", url)

+ @patch("agents.tasks.run_script_email_results_task.delay")
+ @patch("agents.models.Agent.run_script")
+ def test_run_script(self, run_script, email_task):
+ run_script.return_value = "ok"
+ url = "/agents/runscript/"
+ script = baker.make_recipe("scripts.script")

+ # test wait
+ data = {
+ "pk": self.agent.pk,
+ "scriptPK": script.pk,
+ "output": "wait",
+ "args": [],
+ "timeout": 15,
+ }

+ r = self.client.post(url, data, format="json")
+ self.assertEqual(r.status_code, 200)
+ run_script.assert_called_with(
+ scriptpk=script.pk, args=[], timeout=18, wait=True
+ )
+ run_script.reset_mock()

+ # test email default
+ data = {
+ "pk": self.agent.pk,
+ "scriptPK": script.pk,
+ "output": "email",
+ "args": ["abc", "123"],
+ "timeout": 15,
+ "emailmode": "default",
+ "emails": ["admin@example.com", "bob@example.com"],
+ }
+ r = self.client.post(url, data, format="json")
+ self.assertEqual(r.status_code, 200)
+ email_task.assert_called_with(
+ agentpk=self.agent.pk,
+ scriptpk=script.pk,
+ nats_timeout=18,
+ emails=[],
+ args=["abc", "123"],
+ )
+ email_task.reset_mock()

+ # test email overrides
+ data["emailmode"] = "custom"
+ r = self.client.post(url, data, format="json")
+ self.assertEqual(r.status_code, 200)
+ email_task.assert_called_with(
+ agentpk=self.agent.pk,
+ scriptpk=script.pk,
+ nats_timeout=18,
+ emails=["admin@example.com", "bob@example.com"],
+ args=["abc", "123"],
+ )

+ # test fire and forget
+ data = {
+ "pk": self.agent.pk,
+ "scriptPK": script.pk,
+ "output": "forget",
+ "args": ["hello", "world"],
+ "timeout": 22,
+ }

+ r = self.client.post(url, data, format="json")
+ self.assertEqual(r.status_code, 200)
+ run_script.assert_called_with(
+ scriptpk=script.pk, args=["hello", "world"], timeout=25
+ )


class TestAgentViewsNew(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()

- def test_agent_counts(self):
+ """ def test_agent_counts(self):
url = "/agents/agent_counts/"

# create some data
@@ -730,9 +869,9 @@ class TestAgentViewsNew(TacticalTestCase):

r = self.client.post(url, format="json")
self.assertEqual(r.status_code, 200)
- self.assertEqual(r.data, data)
+ self.assertEqual(r.data, data)  # type: ignore

- self.check_not_authenticated("post", url)
+ self.check_not_authenticated("post", url) """

def test_agent_maintenance_mode(self):
url = "/agents/maintenance/"
@@ -742,14 +881,14 @@ class TestAgentViewsNew(TacticalTestCase):
agent = baker.make_recipe("agents.agent", site=site)

# Test client toggle maintenance mode
- data = {"type": "Client", "id": site.client.id, "action": True}
+ data = {"type": "Client", "id": site.client.id, "action": True}  # type: ignore

r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(Agent.objects.get(pk=agent.pk).maintenance_mode)

# Test site toggle maintenance mode
- data = {"type": "Site", "id": site.id, "action": False}
+ data = {"type": "Site", "id": site.id, "action": False}  # type: ignore

r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
@@ -776,8 +915,9 @@ class TestAgentTasks(TacticalTestCase):
self.authenticate()
self.setup_coresettings()

+ @patch("agents.utils.get_exegen_url")
@patch("agents.models.Agent.nats_cmd")
|
@patch("agents.models.Agent.nats_cmd")
|
||||||
def test_agent_update(self, nats_cmd):
|
def test_agent_update(self, nats_cmd, get_exe):
|
||||||
from agents.tasks import agent_update
|
from agents.tasks import agent_update
|
||||||
|
|
||||||
agent_noarch = baker.make_recipe(
|
agent_noarch = baker.make_recipe(
|
||||||
@@ -788,63 +928,96 @@ class TestAgentTasks(TacticalTestCase):
|
|||||||
r = agent_update(agent_noarch.pk)
|
r = agent_update(agent_noarch.pk)
|
||||||
self.assertEqual(r, "noarch")
|
self.assertEqual(r, "noarch")
|
||||||
|
|
||||||
agent_1111 = baker.make_recipe(
|
agent_130 = baker.make_recipe(
|
||||||
"agents.agent",
|
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
|
||||||
version="1.1.11",
|
|
||||||
)
|
|
||||||
r = agent_update(agent_1111.pk)
|
|
||||||
self.assertEqual(r, "not supported")
|
|
||||||
|
|
||||||
agent64_1112 = baker.make_recipe(
|
|
||||||
"agents.agent",
|
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
|
||||||
version="1.1.12",
|
|
||||||
)
|
|
||||||
|
|
||||||
r = agent_update(agent64_1112.pk)
|
|
||||||
self.assertEqual(r, "created")
|
|
||||||
action = PendingAction.objects.get(agent__pk=agent64_1112.pk)
|
|
||||||
self.assertEqual(action.action_type, "agentupdate")
|
|
||||||
self.assertEqual(action.status, "pending")
|
|
||||||
self.assertEqual(
|
|
||||||
action.details["url"],
|
|
||||||
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
|
||||||
)
|
|
||||||
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
|
|
||||||
self.assertEqual(action.details["version"], "1.3.0")
|
|
||||||
nats_cmd.assert_called_with(
|
|
||||||
{
|
|
||||||
"func": "agentupdate",
|
|
||||||
"payload": {
|
|
||||||
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
|
||||||
"version": "1.3.0",
|
|
||||||
"inno": "winagent-v1.3.0.exe",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
wait=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
agent_64_130 = baker.make_recipe(
|
|
||||||
"agents.agent",
|
"agents.agent",
|
||||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
version="1.3.0",
|
version="1.3.0",
|
||||||
)
|
)
|
||||||
nats_cmd.return_value = "ok"
|
r = agent_update(agent_130.pk)
|
||||||
r = agent_update(agent_64_130.pk)
|
self.assertEqual(r, "not supported")
|
||||||
|
|
||||||
|
# test __without__ code signing
|
||||||
|
agent64_nosign = baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
|
version="1.4.14",
|
||||||
|
)
|
||||||
|
|
||||||
|
r = agent_update(agent64_nosign.pk, None)
|
||||||
self.assertEqual(r, "created")
|
self.assertEqual(r, "created")
|
||||||
|
action = PendingAction.objects.get(agent__pk=agent64_nosign.pk)
|
||||||
|
self.assertEqual(action.action_type, "agentupdate")
|
||||||
|
self.assertEqual(action.status, "pending")
|
||||||
|
self.assertEqual(
|
||||||
|
action.details["url"],
|
||||||
|
f"https://github.com/wh1te909/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
action.details["inno"], f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||||
|
)
|
||||||
|
self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)
|
||||||
nats_cmd.assert_called_with(
|
nats_cmd.assert_called_with(
|
||||||
{
|
{
|
||||||
"func": "agentupdate",
|
"func": "agentupdate",
|
||||||
"payload": {
|
"payload": {
|
||||||
"url": settings.DL_64,
|
"url": f"https://github.com/wh1te909/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||||
"version": settings.LATEST_AGENT_VER,
|
"version": settings.LATEST_AGENT_VER,
|
||||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
wait=False,
|
wait=False,
|
||||||
)
|
)
|
||||||
action = PendingAction.objects.get(agent__pk=agent_64_130.pk)
|
|
||||||
|
# test __with__ code signing (64 bit)
|
||||||
|
codesign = baker.make("core.CodeSignToken", token="testtoken123")
|
||||||
|
agent64_sign = baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||||
|
version="1.4.14",
|
||||||
|
)
|
||||||
|
|
||||||
|
nats_cmd.return_value = "ok"
|
||||||
|
get_exe.return_value = "https://exe.tacticalrmm.io"
|
||||||
|
r = agent_update(agent64_sign.pk, codesign.token) # type: ignore
|
||||||
|
self.assertEqual(r, "created")
|
||||||
|
nats_cmd.assert_called_with(
|
||||||
|
{
|
||||||
|
"func": "agentupdate",
|
||||||
|
"payload": {
|
||||||
|
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=64&token=testtoken123", # type: ignore
|
||||||
|
"version": settings.LATEST_AGENT_VER,
|
||||||
|
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wait=False,
|
||||||
|
)
|
||||||
|
action = PendingAction.objects.get(agent__pk=agent64_sign.pk)
|
||||||
|
self.assertEqual(action.action_type, "agentupdate")
|
||||||
|
self.assertEqual(action.status, "pending")
|
||||||
|
|
||||||
|
# test __with__ code signing (32 bit)
|
||||||
|
agent32_sign = baker.make_recipe(
|
||||||
|
"agents.agent",
|
||||||
|
operating_system="Windows 10 Pro, 32 bit (build 19041.450)",
|
||||||
|
version="1.4.14",
|
||||||
|
)
|
||||||
|
|
||||||
|
nats_cmd.return_value = "ok"
|
||||||
|
get_exe.return_value = "https://exe.tacticalrmm.io"
|
||||||
|
r = agent_update(agent32_sign.pk, codesign.token) # type: ignore
|
||||||
|
self.assertEqual(r, "created")
|
||||||
|
nats_cmd.assert_called_with(
|
||||||
|
{
|
||||||
|
"func": "agentupdate",
|
||||||
|
"payload": {
|
||||||
|
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=32&token=testtoken123", # type: ignore
|
||||||
|
"version": settings.LATEST_AGENT_VER,
|
||||||
|
"inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wait=False,
|
||||||
|
)
|
||||||
|
action = PendingAction.objects.get(agent__pk=agent32_sign.pk)
|
||||||
self.assertEqual(action.action_type, "agentupdate")
|
self.assertEqual(action.action_type, "agentupdate")
|
||||||
self.assertEqual(action.status, "pending")
|
self.assertEqual(action.status, "pending")
|
||||||
|
|
||||||
|
|||||||
```diff
@@ -6,8 +6,6 @@ urlpatterns = [
     path("listagents/", views.AgentsTableList.as_view()),
     path("listagentsnodetail/", views.list_agents_no_detail),
     path("<int:pk>/agenteditdetails/", views.agent_edit_details),
-    path("byclient/<int:clientpk>/", views.by_client),
-    path("bysite/<int:sitepk>/", views.by_site),
     path("overdueaction/", views.overdue_action),
     path("sendrawcmd/", views.send_raw_cmd),
     path("<pk>/agentdetail/", views.agent_detail),
@@ -29,7 +27,6 @@ urlpatterns = [
     path("<int:pk>/notes/", views.GetAddNotes.as_view()),
     path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
     path("bulk/", views.bulk),
-    path("agent_counts/", views.agent_counts),
     path("maintenance/", views.agent_maintenance),
     path("<int:pk>/wmi/", views.WMI.as_view()),
 ]
```
api/tacticalrmm/agents/utils.py (new file, 37 lines)

```diff
@@ -0,0 +1,37 @@
+import random
+import urllib.parse
+
+import requests
+from django.conf import settings
+
+
+def get_exegen_url() -> str:
+    urls: list[str] = settings.EXE_GEN_URLS
+    for url in urls:
+        try:
+            r = requests.get(url, timeout=10)
+        except:
+            continue
+
+        if r.status_code == 200:
+            return url
+
+    return random.choice(urls)
+
+
+def get_winagent_url(arch: str) -> str:
+    from core.models import CodeSignToken
+
+    try:
+        codetoken = CodeSignToken.objects.first().token
+        base_url = get_exegen_url() + "/api/v1/winagents/?"
+        params = {
+            "version": settings.LATEST_AGENT_VER,
+            "arch": arch,
+            "token": codetoken,
+        }
+        dl_url = base_url + urllib.parse.urlencode(params)
+    except:
+        dl_url = settings.DL_64 if arch == "64" else settings.DL_32
+
+    return dl_url
```
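A brief, hedged illustration of how this new helper ends up being used; the host and settings values here are placeholders, and in the view changes further down `install_agent` simply calls `get_winagent_url(arch)`:

```python
# Minimal usage sketch, assuming this runs inside the Django project so the
# model import works, and that settings provide EXE_GEN_URLS, LATEST_AGENT_VER,
# DL_64 and DL_32. A core.CodeSignToken row may or may not exist.
from agents.utils import get_winagent_url

download_url = get_winagent_url("64")
# With a code-signing token present this resolves to a generator URL such as
#   <exegen host>/api/v1/winagents/?version=<ver>&arch=64&token=<token>
# otherwise it falls back to the public settings.DL_64 release URL.
print(download_url)
```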
```diff
@@ -3,16 +3,16 @@ import datetime as dt
 import os
 import random
 import string
-import subprocess
-from typing import List
+import time

 from django.conf import settings
 from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
 from loguru import logger
 from packaging import version as pyver
-from rest_framework import generics, status
-from rest_framework.decorators import api_view
+from rest_framework import status
+from rest_framework.decorators import api_view, permission_classes
+from rest_framework.permissions import IsAuthenticated
 from rest_framework.response import Response
 from rest_framework.views import APIView

@@ -24,8 +24,23 @@ from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
 from winupdate.serializers import WinUpdatePolicySerializer
 from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task

-from .models import Agent, Note, RecoveryAction
+from .models import Agent, AgentCustomField, Note, RecoveryAction
+from .permissions import (
+    EditAgentPerms,
+    EvtLogPerms,
+    InstallAgentPerms,
+    ManageNotesPerms,
+    ManageProcPerms,
+    MeshPerms,
+    RebootAgentPerms,
+    RunBulkPerms,
+    RunScriptPerms,
+    SendCMDPerms,
+    UninstallPerms,
+    UpdateAgentPerms,
+)
 from .serializers import (
+    AgentCustomFieldSerializer,
     AgentEditSerializer,
     AgentHostnameSerializer,
     AgentOverdueActionSerializer,
@@ -41,7 +56,7 @@ logger.configure(**settings.LOG_CONFIG)

 @api_view()
 def get_agent_versions(request):
-    agents = Agent.objects.only("pk")
+    agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
     return Response(
         {
             "versions": [settings.LATEST_AGENT_VER],
@@ -51,9 +66,10 @@ def get_agent_versions(request):


 @api_view(["POST"])
+@permission_classes([IsAuthenticated, UpdateAgentPerms])
 def update_agents(request):
     q = Agent.objects.filter(pk__in=request.data["pks"]).only("pk", "version")
-    pks: List[int] = [
+    pks: list[int] = [
         i.pk
         for i in q
         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
@@ -63,30 +79,39 @@ def update_agents(request):


 @api_view()
+@permission_classes([IsAuthenticated, UninstallPerms])
 def ping(request, pk):
     agent = get_object_or_404(Agent, pk=pk)
     status = "offline"
-    if agent.has_nats:
-        r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
+    attempts = 0
+    while 1:
+        r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
         if r == "pong":
             status = "online"
+            break
+        else:
+            attempts += 1
+            time.sleep(1)
+
+        if attempts >= 5:
+            break

     return Response({"name": agent.hostname, "status": status})


 @api_view(["DELETE"])
+@permission_classes([IsAuthenticated, UninstallPerms])
 def uninstall(request):
     agent = get_object_or_404(Agent, pk=request.data["pk"])
-    if agent.has_nats:
-        asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
+    asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))

     name = agent.hostname
     agent.delete()
     reload_nats()
     return Response(f"{name} will now be uninstalled.")


-@api_view(["PATCH"])
+@api_view(["PATCH", "PUT"])
+@permission_classes([IsAuthenticated, EditAgentPerms])
 def edit_agent(request):
     agent = get_object_or_404(Agent, pk=request.data["id"])

@@ -95,17 +120,41 @@ def edit_agent(request):
     a_serializer.save()

     if "winupdatepolicy" in request.data.keys():
-        policy = agent.winupdatepolicy.get()
+        policy = agent.winupdatepolicy.get()  # type: ignore
         p_serializer = WinUpdatePolicySerializer(
             instance=policy, data=request.data["winupdatepolicy"][0]
         )
         p_serializer.is_valid(raise_exception=True)
         p_serializer.save()

+    if "custom_fields" in request.data.keys():
+
+        for field in request.data["custom_fields"]:
+
+            custom_field = field
+            custom_field["agent"] = agent.id  # type: ignore
+
+            if AgentCustomField.objects.filter(
+                field=field["field"], agent=agent.id  # type: ignore
+            ):
+                value = AgentCustomField.objects.get(
+                    field=field["field"], agent=agent.id  # type: ignore
+                )
+                serializer = AgentCustomFieldSerializer(
+                    instance=value, data=custom_field
+                )
+                serializer.is_valid(raise_exception=True)
+                serializer.save()
+            else:
+                serializer = AgentCustomFieldSerializer(data=custom_field)
+                serializer.is_valid(raise_exception=True)
+                serializer.save()
+
     return Response("ok")


 @api_view()
+@permission_classes([IsAuthenticated, MeshPerms])
 def meshcentral(request, pk):
     agent = get_object_or_404(Agent, pk=pk)
     core = CoreSettings.objects.first()
```
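A rough sketch of the request body the reworked `edit_agent` endpoint now accepts; only the `id`, `winupdatepolicy`, `custom_fields` and `field` keys come from the hunk above, while the endpoint URL, the `value` key and all concrete values are illustrative assumptions:

```python
import requests

# Hypothetical PATCH exercising the new custom_fields handling in edit_agent.
payload = {
    "id": 1,  # agent pk (placeholder)
    "custom_fields": [
        # each entry is upserted through AgentCustomFieldSerializer;
        # "value" is an assumed serializer field, not shown in the diff
        {"field": 2, "value": "Building A"},
    ],
}
r = requests.patch(
    "https://rmm.example.com/agents/editagent/",  # placeholder host and path
    json=payload,
    headers={"Authorization": "Token <api token>"},
)
print(r.status_code, r.text)
```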
```diff
@@ -144,9 +193,6 @@ def agent_detail(request, pk):
 @api_view()
 def get_processes(request, pk):
     agent = get_object_or_404(Agent, pk=pk)
-    if pyver.parse(agent.version) < pyver.parse("1.2.0"):
-        return notify_error("Requires agent version 1.2.0 or greater")
-
     r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
     if r == "timeout":
         return notify_error("Unable to contact the agent")
@@ -154,11 +200,9 @@ def get_processes(request, pk):


 @api_view()
+@permission_classes([IsAuthenticated, ManageProcPerms])
 def kill_proc(request, pk, pid):
     agent = get_object_or_404(Agent, pk=pk)
-    if not agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")
-
     r = asyncio.run(
         agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
     )
@@ -172,10 +216,9 @@ def kill_proc(request, pk, pid):


 @api_view()
+@permission_classes([IsAuthenticated, EvtLogPerms])
 def get_event_log(request, pk, logtype, days):
     agent = get_object_or_404(Agent, pk=pk)
-    if not agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")
     timeout = 180 if logtype == "Security" else 30
     data = {
         "func": "eventlog",
@@ -193,10 +236,9 @@ def get_event_log(request, pk, logtype, days):


 @api_view(["POST"])
+@permission_classes([IsAuthenticated, SendCMDPerms])
 def send_raw_cmd(request):
     agent = get_object_or_404(Agent, pk=request.data["pk"])
-    if not agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")
     timeout = int(request.data["timeout"])
     data = {
         "func": "rawcmd",
@@ -221,15 +263,32 @@ def send_raw_cmd(request):
     return Response(r)


-class AgentsTableList(generics.ListAPIView):
-    queryset = (
-        Agent.objects.select_related("site")
-        .prefetch_related("agentchecks")
-        .only(
+class AgentsTableList(APIView):
+    def patch(self, request):
+        if "sitePK" in request.data.keys():
+            queryset = (
+                Agent.objects.select_related("site", "policy", "alert_template")
+                .prefetch_related("agentchecks")
+                .filter(site_id=request.data["sitePK"])
+            )
+        elif "clientPK" in request.data.keys():
+            queryset = (
+                Agent.objects.select_related("site", "policy", "alert_template")
+                .prefetch_related("agentchecks")
+                .filter(site__client_id=request.data["clientPK"])
+            )
+        else:
+            queryset = Agent.objects.select_related(
+                "site", "policy", "alert_template"
+            ).prefetch_related("agentchecks")
+
+        queryset = queryset.only(
             "pk",
             "hostname",
             "agent_id",
             "site",
+            "policy",
+            "alert_template",
             "monitoring_type",
             "description",
             "needs_reboot",
@@ -244,11 +303,6 @@ class AgentsTableList(generics.ListAPIView):
             "time_zone",
             "maintenance_mode",
         )
-    )
-    serializer_class = AgentTableSerializer
-
-    def list(self, request):
-        queryset = self.get_queryset()
         ctx = {"default_tz": get_default_timezone()}
         serializer = AgentTableSerializer(queryset, many=True, context=ctx)
         return Response(serializer.data)
```
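With `by_client` and `by_site` removed (next hunk), the same table data is now fetched by PATCHing `listagents/` with an optional filter key; a hedged client-side sketch (host, token and primary keys are placeholders):

```python
import requests

BASE = "https://rmm.example.com"  # placeholder host
HEADERS = {"Authorization": "Token <api token>"}

# all agents
r_all = requests.patch(f"{BASE}/agents/listagents/", json={}, headers=HEADERS)
# agents of one site or one client, replacing the removed by_site / by_client views
r_site = requests.patch(f"{BASE}/agents/listagents/", json={"sitePK": 3}, headers=HEADERS)
r_client = requests.patch(f"{BASE}/agents/listagents/", json={"clientPK": 1}, headers=HEADERS)
print(len(r_all.json()), len(r_site.json()), len(r_client.json()))
```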
```diff
@@ -266,66 +320,6 @@ def agent_edit_details(request, pk):
     return Response(AgentEditSerializer(agent).data)


-@api_view()
-def by_client(request, clientpk):
-    agents = (
-        Agent.objects.select_related("site")
-        .filter(site__client_id=clientpk)
-        .prefetch_related("agentchecks")
-        .only(
-            "pk",
-            "hostname",
-            "agent_id",
-            "site",
-            "monitoring_type",
-            "description",
-            "needs_reboot",
-            "overdue_text_alert",
-            "overdue_email_alert",
-            "overdue_time",
-            "offline_time",
-            "last_seen",
-            "boot_time",
-            "logged_in_username",
-            "last_logged_in_user",
-            "time_zone",
-            "maintenance_mode",
-        )
-    )
-    ctx = {"default_tz": get_default_timezone()}
-    return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
-
-
-@api_view()
-def by_site(request, sitepk):
-    agents = (
-        Agent.objects.filter(site_id=sitepk)
-        .select_related("site")
-        .prefetch_related("agentchecks")
-        .only(
-            "pk",
-            "hostname",
-            "agent_id",
-            "site",
-            "monitoring_type",
-            "description",
-            "needs_reboot",
-            "overdue_text_alert",
-            "overdue_email_alert",
-            "overdue_time",
-            "offline_time",
-            "last_seen",
-            "boot_time",
-            "logged_in_username",
-            "last_logged_in_user",
-            "time_zone",
-            "maintenance_mode",
-        )
-    )
-    ctx = {"default_tz": get_default_timezone()}
-    return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
-
-
 @api_view(["POST"])
 def overdue_action(request):
     agent = get_object_or_404(Agent, pk=request.data["pk"])
@@ -338,12 +332,10 @@ def overdue_action(request):


 class Reboot(APIView):
+    permission_classes = [IsAuthenticated, RebootAgentPerms]
     # reboot now
     def post(self, request):
         agent = get_object_or_404(Agent, pk=request.data["pk"])
-        if not agent.has_nats:
-            return notify_error("Requires agent version 1.1.0 or greater")
-
         r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
         if r != "ok":
             return notify_error("Unable to contact the agent")
@@ -353,8 +345,6 @@ class Reboot(APIView):
     # reboot later
     def patch(self, request):
         agent = get_object_or_404(Agent, pk=request.data["pk"])
-        if not agent.has_gotasks:
-            return notify_error("Requires agent version 1.1.1 or greater")

         try:
             obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
@@ -369,6 +359,7 @@ class Reboot(APIView):
             "func": "schedtask",
             "schedtaskpayload": {
                 "type": "schedreboot",
+                "deleteafter": True,
                 "trigger": "once",
                 "name": task_name,
                 "year": int(dt.datetime.strftime(obj, "%Y")),
@@ -379,9 +370,6 @@ class Reboot(APIView):
             },
         }

-        if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
-            nats_data["schedtaskpayload"]["deleteafter"] = True
-
         r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
         if r != "ok":
             return notify_error(r)
@@ -397,9 +385,12 @@ class Reboot(APIView):


 @api_view(["POST"])
+@permission_classes([IsAuthenticated, InstallAgentPerms])
 def install_agent(request):
     from knox.models import AuthToken

+    from agents.utils import get_winagent_url
+
     client_id = request.data["client"]
     site_id = request.data["site"]
     version = settings.LATEST_AGENT_VER
@@ -420,131 +411,27 @@ def install_agent(request):
     inno = (
         f"winagent-v{version}.exe" if arch == "64" else f"winagent-v{version}-x86.exe"
     )
-    download_url = settings.DL_64 if arch == "64" else settings.DL_32
+    download_url = get_winagent_url(arch)

     _, token = AuthToken.objects.create(
         user=request.user, expiry=dt.timedelta(hours=request.data["expires"])
     )

     if request.data["installMethod"] == "exe":
-        go_bin = "/usr/local/rmmgo/go/bin/go"
+        from tacticalrmm.utils import generate_winagent_exe

-        if not os.path.exists(go_bin):
-            return Response("nogolang", status=status.HTTP_409_CONFLICT)
-
-        api = request.data["api"]
-        atype = request.data["agenttype"]
-        rdp = request.data["rdp"]
-        ping = request.data["ping"]
-        power = request.data["power"]
-
-        file_name = "rmm-installer.exe"
-        exe = os.path.join(settings.EXE_DIR, file_name)
-
-        if os.path.exists(exe):
-            try:
-                os.remove(exe)
-            except Exception as e:
-                logger.error(str(e))
-
-        goarch = "amd64" if arch == "64" else "386"
-        cmd = [
-            "env",
-            "GOOS=windows",
-            f"GOARCH={goarch}",
-            go_bin,
-            "build",
-            f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
-            f"-X 'main.Api={api}'",
-            f"-X 'main.Client={client_id}'",
-            f"-X 'main.Site={site_id}'",
-            f"-X 'main.Atype={atype}'",
-            f"-X 'main.Rdp={rdp}'",
-            f"-X 'main.Ping={ping}'",
-            f"-X 'main.Power={power}'",
-            f"-X 'main.DownloadUrl={download_url}'",
-            f"-X 'main.Token={token}'\"",
-            "-o",
-            exe,
-        ]
-
-        build_error = False
-        gen_error = False
-
-        gen = [
-            "env",
-            "GOOS=windows",
-            f"GOARCH={goarch}",
-            go_bin,
-            "generate",
-        ]
-        try:
-            r1 = subprocess.run(
-                " ".join(gen),
-                capture_output=True,
-                shell=True,
-                cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
-            )
-        except Exception as e:
-            gen_error = True
-            logger.error(str(e))
-            return Response(
-                "genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
-            )
-
-        if r1.returncode != 0:
-            gen_error = True
-            if r1.stdout:
-                logger.error(r1.stdout.decode("utf-8", errors="ignore"))
-
-            if r1.stderr:
-                logger.error(r1.stderr.decode("utf-8", errors="ignore"))
-
-            logger.error(f"Go build failed with return code {r1.returncode}")
-
-        if gen_error:
-            return Response(
-                "genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
-            )
-
-        try:
-            r = subprocess.run(
-                " ".join(cmd),
-                capture_output=True,
-                shell=True,
-                cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
-            )
-        except Exception as e:
-            build_error = True
-            logger.error(str(e))
-            return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)
-
-        if r.returncode != 0:
-            build_error = True
-            if r.stdout:
-                logger.error(r.stdout.decode("utf-8", errors="ignore"))
-
-            if r.stderr:
-                logger.error(r.stderr.decode("utf-8", errors="ignore"))
-
-            logger.error(f"Go build failed with return code {r.returncode}")
-
-        if build_error:
-            return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)
-
-        if settings.DEBUG:
-            with open(exe, "rb") as f:
-                response = HttpResponse(
-                    f.read(),
-                    content_type="application/vnd.microsoft.portable-executable",
-                )
-                response["Content-Disposition"] = f"inline; filename={file_name}"
-                return response
-        else:
-            response = HttpResponse()
-            response["Content-Disposition"] = f"attachment; filename={file_name}"
-            response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
-            return response
+        return generate_winagent_exe(
+            client=client_id,
+            site=site_id,
+            agent_type=request.data["agenttype"],
+            rdp=request.data["rdp"],
+            ping=request.data["ping"],
+            power=request.data["power"],
+            arch=arch,
+            token=token,
+            api=request.data["api"],
+            file_name=request.data["fileName"],
+        )

     elif request.data["installMethod"] == "manual":
         cmd = [
@@ -638,22 +525,14 @@ def recover(request):
     agent = get_object_or_404(Agent, pk=request.data["pk"])
     mode = request.data["mode"]

-    if pyver.parse(agent.version) <= pyver.parse("0.9.5"):
-        return notify_error("Only available in agent version greater than 0.9.5")
+    # attempt a realtime recovery, otherwise fall back to old recovery method
+    if mode == "tacagent" or mode == "mesh":
+        data = {"func": "recover", "payload": {"mode": mode}}
+        r = asyncio.run(agent.nats_cmd(data, timeout=10))
+        if r == "ok":
+            return Response("Successfully completed recovery")

-    if not agent.has_nats:
-        if mode == "tacagent" or mode == "rpc":
-            return notify_error("Requires agent version 1.1.0 or greater")
-
-    # attempt a realtime recovery if supported, otherwise fall back to old recovery method
-    if agent.has_nats:
-        if mode == "tacagent" or mode == "mesh":
-            data = {"func": "recover", "payload": {"mode": mode}}
-            r = asyncio.run(agent.nats_cmd(data, timeout=10))
-            if r == "ok":
-                return Response("Successfully completed recovery")
-
-    if agent.recoveryactions.filter(last_run=None).exists():
+    if agent.recoveryactions.filter(last_run=None).exists():  # type: ignore
         return notify_error(
             "A recovery action is currently pending. Please wait for the next agent check-in."
         )
@@ -679,12 +558,12 @@ def recover(request):


 @api_view(["POST"])
+@permission_classes([IsAuthenticated, RunScriptPerms])
 def run_script(request):
     agent = get_object_or_404(Agent, pk=request.data["pk"])
-    if not agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")
     script = get_object_or_404(Script, pk=request.data["scriptPK"])
     output = request.data["output"]
+    args = request.data["args"]
     req_timeout = int(request.data["timeout"]) + 3

     AuditLog.audit_script_run(
@@ -694,13 +573,12 @@ def run_script(request):
     )

     if output == "wait":
-        r = agent.run_script(scriptpk=script.pk, timeout=req_timeout, wait=True)
+        r = agent.run_script(
+            scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
+        )
         return Response(r)

     elif output == "email":
-        if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
-            return notify_error("Requires agent version 1.1.12 or greater")
-
         emails = (
             [] if request.data["emailmode"] == "default" else request.data["emails"]
         )
@@ -709,9 +587,10 @@ def run_script(request):
             scriptpk=script.pk,
             nats_timeout=req_timeout,
             emails=emails,
+            args=args,
         )
     else:
-        agent.run_script(scriptpk=script.pk, timeout=req_timeout)
+        agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)

     return Response(f"{script.name} will now be run on {agent.hostname}")

```
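The payload this endpoint expects is the one exercised by `test_run_script` above; a hedged client-side sketch (host, token and primary keys are placeholders):

```python
import requests

# Fire a script at one agent; the keys mirror the test_run_script payloads above.
payload = {
    "pk": 1,                 # agent pk (placeholder)
    "scriptPK": 2,           # script pk (placeholder)
    "output": "email",       # "wait", "email", or anything else for fire-and-forget
    "args": ["abc", "123"],
    "timeout": 15,           # the view adds 3 seconds for the NATS round trip
    "emailmode": "default",  # "custom" uses the "emails" list instead of the defaults
    "emails": ["admin@example.com"],
}
r = requests.post(
    "https://rmm.example.com/agents/runscript/",  # placeholder host
    json=payload,
    headers={"Authorization": "Token <api token>"},
)
print(r.text)
```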
```diff
@@ -719,11 +598,8 @@ def run_script(request):
 @api_view()
 def recover_mesh(request, pk):
     agent = get_object_or_404(Agent, pk=pk)
-    if not agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")
-
     data = {"func": "recover", "payload": {"mode": "mesh"}}
-    r = asyncio.run(agent.nats_cmd(data, timeout=45))
+    r = asyncio.run(agent.nats_cmd(data, timeout=90))
     if r != "ok":
         return notify_error("Unable to contact the agent")

@@ -765,6 +641,8 @@ class GetAddNotes(APIView):


 class GetEditDeleteNote(APIView):
+    permission_classes = [IsAuthenticated, ManageNotesPerms]
+
     def get(self, request, pk):
         note = get_object_or_404(Note, pk=pk)
         return Response(NoteSerializer(note).data)
@@ -783,6 +661,7 @@ class GetEditDeleteNote(APIView):


 @api_view(["POST"])
+@permission_classes([IsAuthenticated, RunBulkPerms])
 def bulk(request):
     if request.data["target"] == "agents" and not request.data["agentPKs"]:
         return notify_error("Must select at least 1 agent")
@@ -803,7 +682,7 @@ def bulk(request):
     elif request.data["monType"] == "workstations":
         q = q.filter(monitoring_type="workstation")

-    agents: List[int] = [agent.pk for agent in q]
+    agents: list[int] = [agent.pk for agent in q]

     AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)

@@ -832,49 +711,6 @@ def bulk(request):
     return notify_error("Something went wrong")


-@api_view(["POST"])
-def agent_counts(request):
-
-    server_offline_count = len(
-        [
-            agent
-            for agent in Agent.objects.filter(monitoring_type="server").only(
-                "pk",
-                "last_seen",
-                "overdue_time",
-                "offline_time",
-            )
-            if not agent.status == "online"
-        ]
-    )
-
-    workstation_offline_count = len(
-        [
-            agent
-            for agent in Agent.objects.filter(monitoring_type="workstation").only(
-                "pk",
-                "last_seen",
-                "overdue_time",
-                "offline_time",
-            )
-            if not agent.status == "online"
-        ]
-    )
-
-    return Response(
-        {
-            "total_server_count": Agent.objects.filter(
-                monitoring_type="server"
-            ).count(),
-            "total_server_offline_count": server_offline_count,
-            "total_workstation_count": Agent.objects.filter(
-                monitoring_type="workstation"
-            ).count(),
-            "total_workstation_offline_count": workstation_offline_count,
-        }
-    )
-
-
 @api_view(["POST"])
 def agent_maintenance(request):
     if request.data["type"] == "Client":
@@ -901,9 +737,6 @@ def agent_maintenance(request):
 class WMI(APIView):
     def get(self, request, pk):
         agent = get_object_or_404(Agent, pk=pk)
-        if pyver.parse(agent.version) < pyver.parse("1.1.2"):
-            return notify_error("Requires agent version 1.1.2 or greater")
-
         r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
         if r != "ok":
             return notify_error("Unable to contact the agent")
```
```diff
@@ -1,7 +1,21 @@
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING, Union
+
+from django.conf import settings
 from django.contrib.postgres.fields import ArrayField
 from django.db import models
 from django.db.models.fields import BooleanField, PositiveIntegerField
 from django.utils import timezone as djangotime
+from loguru import logger
+
+if TYPE_CHECKING:
+    from agents.models import Agent
+    from autotasks.models import AutomatedTask
+    from checks.models import Check
+
+logger.configure(**settings.LOG_CONFIG)

 SEVERITY_CHOICES = [
     ("info", "Informational"),
@@ -78,7 +92,7 @@ class Alert(models.Model):
         self.save()

     @classmethod
-    def create_availability_alert(cls, agent):
+    def create_or_return_availability_alert(cls, agent):
         if not cls.objects.filter(agent=agent, resolved=False).exists():
             return cls.objects.create(
                 agent=agent,
@@ -87,9 +101,11 @@ class Alert(models.Model):
                 message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
                 hidden=True,
             )
+        else:
+            return cls.objects.get(agent=agent, resolved=False)

     @classmethod
-    def create_check_alert(cls, check):
+    def create_or_return_check_alert(cls, check):

         if not cls.objects.filter(assigned_check=check, resolved=False).exists():
             return cls.objects.create(
@@ -99,9 +115,11 @@ class Alert(models.Model):
                 message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
                 hidden=True,
             )
+        else:
+            return cls.objects.get(assigned_check=check, resolved=False)

     @classmethod
-    def create_task_alert(cls, task):
+    def create_or_return_task_alert(cls, task):

         if not cls.objects.filter(assigned_task=task, resolved=False).exists():
             return cls.objects.create(
@@ -111,10 +129,335 @@ class Alert(models.Model):
                 message=f"{task.agent.hostname} has task: {task.name} that failed.",
                 hidden=True,
             )
+        else:
+            return cls.objects.get(assigned_task=task, resolved=False)

     @classmethod
-    def create_custom_alert(cls, custom):
-        pass
+    def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
+        from agents.models import Agent
+        from autotasks.models import AutomatedTask
+        from checks.models import Check
+
+        # set variables
+        dashboard_severities = None
+        email_severities = None
+        text_severities = None
+        always_dashboard = None
+        always_email = None
+        always_text = None
+        alert_interval = None
+        email_task = None
+        text_task = None
+
+        # check what the instance passed is
+        if isinstance(instance, Agent):
+            from agents.tasks import agent_outage_email_task, agent_outage_sms_task
+
+            email_task = agent_outage_email_task
+            text_task = agent_outage_sms_task
+
+            email_alert = instance.overdue_email_alert
+            text_alert = instance.overdue_text_alert
+            dashboard_alert = instance.overdue_dashboard_alert
+            alert_template = instance.alert_template
+            maintenance_mode = instance.maintenance_mode
+            alert_severity = "error"
+            agent = instance
+
+            # set alert_template settings
+            if alert_template:
+                dashboard_severities = ["error"]
+                email_severities = ["error"]
+                text_severities = ["error"]
+                always_dashboard = alert_template.agent_always_alert
+                always_email = alert_template.agent_always_email
+                always_text = alert_template.agent_always_text
+                alert_interval = alert_template.agent_periodic_alert_days
+
+            if instance.should_create_alert(alert_template):
+                alert = cls.create_or_return_availability_alert(instance)
+            else:
+                # check if there is an alert that exists
+                if cls.objects.filter(agent=instance, resolved=False).exists():
+                    alert = cls.objects.get(agent=instance, resolved=False)
+                else:
+                    alert = None
+
+        elif isinstance(instance, Check):
+            from checks.tasks import (
+                handle_check_email_alert_task,
+                handle_check_sms_alert_task,
+            )
+
+            email_task = handle_check_email_alert_task
+            text_task = handle_check_sms_alert_task
+
+            email_alert = instance.email_alert
+            text_alert = instance.text_alert
+            dashboard_alert = instance.dashboard_alert
+            alert_template = instance.agent.alert_template
+            maintenance_mode = instance.agent.maintenance_mode
+            alert_severity = instance.alert_severity
+            agent = instance.agent
+
+            # set alert_template settings
+            if alert_template:
+                dashboard_severities = alert_template.check_dashboard_alert_severity
+                email_severities = alert_template.check_email_alert_severity
+                text_severities = alert_template.check_text_alert_severity
+                always_dashboard = alert_template.check_always_alert
+                always_email = alert_template.check_always_email
+                always_text = alert_template.check_always_text
+                alert_interval = alert_template.check_periodic_alert_days
+
+            if instance.should_create_alert(alert_template):
+                alert = cls.create_or_return_check_alert(instance)
+            else:
+                # check if there is an alert that exists
+                if cls.objects.filter(assigned_check=instance, resolved=False).exists():
+                    alert = cls.objects.get(assigned_check=instance, resolved=False)
+                else:
+                    alert = None
+
+        elif isinstance(instance, AutomatedTask):
+            from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
+
+            email_task = handle_task_email_alert
+            text_task = handle_task_sms_alert
+
+            email_alert = instance.email_alert
+            text_alert = instance.text_alert
+            dashboard_alert = instance.dashboard_alert
+            alert_template = instance.agent.alert_template
+            maintenance_mode = instance.agent.maintenance_mode
+            alert_severity = instance.alert_severity
+            agent = instance.agent
+
+            # set alert_template settings
+            if alert_template:
+                dashboard_severities = alert_template.task_dashboard_alert_severity
+                email_severities = alert_template.task_email_alert_severity
+                text_severities = alert_template.task_text_alert_severity
+                always_dashboard = alert_template.task_always_alert
+                always_email = alert_template.task_always_email
+                always_text = alert_template.task_always_text
+                alert_interval = alert_template.task_periodic_alert_days
+
+            if instance.should_create_alert(alert_template):
+                alert = cls.create_or_return_task_alert(instance)
+            else:
+                # check if there is an alert that exists
+                if cls.objects.filter(assigned_task=instance, resolved=False).exists():
+                    alert = cls.objects.get(assigned_task=instance, resolved=False)
+                else:
+                    alert = None
+        else:
+            return
+
+        # return if agent is in maintenance mode
+        if maintenance_mode or not alert:
+            return
+
+        # check if alert severity changed on check and update the alert
+        if alert_severity != alert.severity:
+            alert.severity = alert_severity
+            alert.save(update_fields=["severity"])
+
+        # create alert in dashboard if enabled
+        if dashboard_alert or always_dashboard:
+
+            # check if alert template is set and specific severities are configured
+            if alert_template and alert.severity not in dashboard_severities:  # type: ignore
+                pass
+            else:
+                alert.hidden = False
+                alert.save()
+
+        # send email if enabled
+        if email_alert or always_email:
+
+            # check if alert template is set and specific severities are configured
+            if alert_template and alert.severity not in email_severities:  # type: ignore
+                pass
+            else:
+                email_task.delay(
+                    pk=alert.pk,
+                    alert_interval=alert_interval,
+                )
+
+        # send text if enabled
+        if text_alert or always_text:
+
+            # check if alert template is set and specific severities are configured
+            if alert_template and alert.severity not in text_severities:  # type: ignore
+                pass
+            else:
+                text_task.delay(pk=alert.pk, alert_interval=alert_interval)
+
+        # check if any scripts should be run
+        if alert_template and alert_template.action and not alert.action_run:
+            r = agent.run_script(
+                scriptpk=alert_template.action.pk,
+                args=alert.parse_script_args(alert_template.action_args),
+                timeout=alert_template.action_timeout,
+                wait=True,
+                full=True,
+                run_on_any=True,
+            )
+
+            # command was successful
+            if type(r) == dict:
+                alert.action_retcode = r["retcode"]
+                alert.action_stdout = r["stdout"]
+                alert.action_stderr = r["stderr"]
+                alert.action_execution_time = "{:.4f}".format(r["execution_time"])
+                alert.action_run = djangotime.now()
+                alert.save()
+            else:
+                logger.error(
+                    f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
+                )
+
+    @classmethod
+    def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
+        from agents.models import Agent
+        from autotasks.models import AutomatedTask
+        from checks.models import Check
+
+        # set variables
+        email_on_resolved = False
+        text_on_resolved = False
+        resolved_email_task = None
+        resolved_text_task = None
+
+        # check what the instance passed is
+        if isinstance(instance, Agent):
+            from agents.tasks import agent_recovery_email_task, agent_recovery_sms_task
+
+            resolved_email_task = agent_recovery_email_task
+            resolved_text_task = agent_recovery_sms_task
+
+            alert_template = instance.alert_template
+            alert = cls.objects.get(agent=instance, resolved=False)
+            maintenance_mode = instance.maintenance_mode
+            agent = instance
+
+            if alert_template:
+                email_on_resolved = alert_template.agent_email_on_resolved
+                text_on_resolved = alert_template.agent_text_on_resolved
+
+        elif isinstance(instance, Check):
+            from checks.tasks import (
+                handle_resolved_check_email_alert_task,
+                handle_resolved_check_sms_alert_task,
+            )
+
+            resolved_email_task = handle_resolved_check_email_alert_task
+            resolved_text_task = handle_resolved_check_sms_alert_task
+
+            alert_template = instance.agent.alert_template
+            alert = cls.objects.get(assigned_check=instance, resolved=False)
+            maintenance_mode = instance.agent.maintenance_mode
+            agent = instance.agent
+
+            if alert_template:
+                email_on_resolved = alert_template.check_email_on_resolved
+                text_on_resolved = alert_template.check_text_on_resolved
+
+        elif isinstance(instance, AutomatedTask):
+            from autotasks.tasks import (
+                handle_resolved_task_email_alert,
+                handle_resolved_task_sms_alert,
+            )
+
+            resolved_email_task = handle_resolved_task_email_alert
+            resolved_text_task = handle_resolved_task_sms_alert
+
+            alert_template = instance.agent.alert_template
+            alert = cls.objects.get(assigned_task=instance, resolved=False)
+            maintenance_mode = instance.agent.maintenance_mode
+            agent = instance.agent
+
+            if alert_template:
+                email_on_resolved = alert_template.task_email_on_resolved
+                text_on_resolved = alert_template.task_text_on_resolved
+
+        else:
+            return
+
+        # return if agent is in maintenance mode
+        if maintenance_mode:
+            return
+
+        alert.resolve()
+
+        # check if a resolved email notification should be send
+        if email_on_resolved and not alert.resolved_email_sent:
+            resolved_email_task.delay(pk=alert.pk)
+
+        # check if resolved text should be sent
+        if text_on_resolved and not alert.resolved_sms_sent:
+            resolved_text_task.delay(pk=alert.pk)
+
+        # check if resolved script should be run
+        if (
+            alert_template
+            and alert_template.resolved_action
+            and not alert.resolved_action_run
+        ):
+            r = agent.run_script(
+                scriptpk=alert_template.resolved_action.pk,
+                args=alert.parse_script_args(alert_template.resolved_action_args),
+                timeout=alert_template.resolved_action_timeout,
+                wait=True,
+                full=True,
+                run_on_any=True,
+            )
+
+            # command was successful
+            if type(r) == dict:
+                alert.resolved_action_retcode = r["retcode"]
+                alert.resolved_action_stdout = r["stdout"]
+                alert.resolved_action_stderr = r["stderr"]
+                alert.resolved_action_execution_time = "{:.4f}".format(
+                    r["execution_time"]
+                )
+                alert.resolved_action_run = djangotime.now()
+                alert.save()
+            else:
+                logger.error(
+                    f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
+                )
+
+    def parse_script_args(self, args: list[str]):
+
+        if not args:
+            return []
+
+        temp_args = list()
+        # pattern to match for injection
+        pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")
+
+        for arg in args:
+            match = pattern.match(arg)
+            if match:
+                name = match.group(1)
+
+                if hasattr(self, name):
+                    value = f"'{getattr(self, name)}'"
+                else:
+                    continue
+
+                try:
+                    temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))  # type: ignore
+                except Exception as e:
```
|
||||||
|
logger.error(e)
|
||||||
|
continue
|
||||||
|
|
||||||
|
else:
|
||||||
|
temp_args.append(arg)
|
||||||
|
|
||||||
|
return temp_args
|
||||||
|
|
||||||
|
|
||||||
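As an aside, the {{alert.<field>}} substitution that parse_script_args performs above can be exercised in isolation. A standalone sketch follows; the Alert model is replaced by a tiny stub, and the field and argument names are invented for illustration only:

import re

# Same regex as parse_script_args above.
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")

class FakeAlert:
    # stand-in for an Alert instance; only the attribute referenced below exists
    message = "CPU load check failed"

def substitute(args, alert):
    out = []
    for arg in args:
        m = pattern.match(arg)
        if not m:
            out.append(arg)
            continue
        name = m.group(1)
        if not hasattr(alert, name):
            # unknown fields drop the whole argument, matching the method above
            continue
        out.append(re.sub("\\{\\{.*\\}\\}", f"'{getattr(alert, name)}'", arg))
    return out

print(substitute(["-Message {{alert.message}}", "-Severity warning"], FakeAlert()))
# ["-Message 'CPU load check failed'", "-Severity warning"]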
class AlertTemplate(models.Model):
@@ -283,4 +626,4 @@ class AlertTemplate(models.Model):

    @property
    def is_default_template(self) -> bool:
-        return self.default_alert_template.exists()
        return self.default_alert_template.exists()  # type: ignore
api/tacticalrmm/alerts/permissions.py (new file, 11 lines)
@@ -0,0 +1,11 @@
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm


class ManageAlertsPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET" or r.method == "PATCH":
            return True

        return _has_perm(r, "can_manage_alerts")
@@ -12,3 +12,13 @@ def unsnooze_alerts() -> str:
    )

    return "ok"


@app.task
def cache_agents_alert_template():
    from agents.models import Agent

    for agent in Agent.objects.only("pk"):
        agent.set_alert_template()

    return "ok"
File diff suppressed because it is too large
@@ -3,20 +3,25 @@ from datetime import datetime as dt
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from tacticalrmm.utils import notify_error

from .models import Alert, AlertTemplate
from .permissions import ManageAlertsPerms
from .serializers import (
    AlertSerializer,
    AlertTemplateRelationSerializer,
    AlertTemplateSerializer,
)
from .tasks import cache_agents_alert_template


class GetAddAlerts(APIView):
    permission_classes = [IsAuthenticated, ManageAlertsPerms]

    def patch(self, request):

        # top 10 alerts for dashboard icon
@@ -108,6 +113,8 @@ class GetAddAlerts(APIView):


class GetUpdateDeleteAlert(APIView):
    permission_classes = [IsAuthenticated, ManageAlertsPerms]

    def get(self, request, pk):
        alert = get_object_or_404(Alert, pk=pk)

@@ -162,6 +169,8 @@ class GetUpdateDeleteAlert(APIView):


class BulkAlerts(APIView):
    permission_classes = [IsAuthenticated, ManageAlertsPerms]

    def post(self, request):
        if request.data["bulk_action"] == "resolve":
            Alert.objects.filter(id__in=request.data["alerts"]).update(
@@ -184,6 +193,8 @@ class BulkAlerts(APIView):


class GetAddAlertTemplates(APIView):
    permission_classes = [IsAuthenticated, ManageAlertsPerms]

    def get(self, request):
        alert_templates = AlertTemplate.objects.all()

@@ -194,10 +205,15 @@ class GetAddAlertTemplates(APIView):
        serializer.is_valid(raise_exception=True)
        serializer.save()

        # cache alert_template value on agents
        cache_agents_alert_template.delay()

        return Response("ok")


class GetUpdateDeleteAlertTemplate(APIView):
    permission_classes = [IsAuthenticated, ManageAlertsPerms]

    def get(self, request, pk):
        alert_template = get_object_or_404(AlertTemplate, pk=pk)

@@ -212,11 +228,17 @@ class GetUpdateDeleteAlertTemplate(APIView):
        serializer.is_valid(raise_exception=True)
        serializer.save()

        # cache alert_template value on agents
        cache_agents_alert_template.delay()

        return Response("ok")

    def delete(self, request, pk):
        get_object_or_404(AlertTemplate, pk=pk).delete()

        # cache alert_template value on agents
        cache_agents_alert_template.delay()

        return Response("ok")
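One pattern worth noting in the views above: every alert view now pairs IsAuthenticated with ManageAlertsPerms, so GET and PATCH requests pass through while other methods require the can_manage_alerts permission (checked via _has_perm). A minimal, generic sketch of that style of DRF permission class; the role attribute assumed here is hypothetical, not this project's actual user model:

from rest_framework import permissions

class ReadOnlyOrCanManage(permissions.BasePermission):
    """Illustrative only: GET/PATCH pass through, everything else needs a flag."""

    required_flag = "can_manage_alerts"

    def has_permission(self, request, view):
        if request.method in ("GET", "PATCH"):
            return True
        # assumed shape: the user carries a role object exposing boolean permission flags
        role = getattr(request.user, "role", None)
        return bool(role and getattr(role, self.required_flag, False))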
@@ -1,11 +1,12 @@
import json
import os
-from itertools import cycle
from unittest.mock import patch

from django.conf import settings
from django.utils import timezone as djangotime
from model_bakery import baker

from autotasks.models import AutomatedTask
from tacticalrmm.test import TacticalTestCase
@@ -18,8 +19,44 @@ class TestAPIv3(TacticalTestCase):
    def test_get_checks(self):
        url = f"/api/v3/{self.agent.agent_id}/checkrunner/"

        # add a check
        check1 = baker.make_recipe("checks.ping_check", agent=self.agent)
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data["check_interval"], self.agent.check_interval)  # type: ignore
        self.assertEqual(len(r.data["checks"]), 1)  # type: ignore

        # override check run interval
        check2 = baker.make_recipe(
            "checks.ping_check", agent=self.agent, run_interval=20
        )

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data["check_interval"], 20)  # type: ignore
        self.assertEqual(len(r.data["checks"]), 2)  # type: ignore

        # Set last_run on both checks and should return an empty list
        check1.last_run = djangotime.now()
        check1.save()
        check2.last_run = djangotime.now()
        check2.save()

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data["check_interval"], 20)  # type: ignore
        self.assertFalse(r.data["checks"])  # type: ignore

        # set last_run greater than interval
        check1.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
        check1.save()
        check2.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
        check2.save()

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.data["check_interval"], 20)  # type: ignore
        self.assertEquals(len(r.data["checks"]), 2)  # type: ignore

        url = "/api/v3/Maj34ACb324j234asdj2n34kASDjh34-DESKTOPTEST123/checkrunner/"
        r = self.client.get(url)
@@ -53,3 +90,253 @@ class TestAPIv3(TacticalTestCase):
            r.json(),
            {"agent": self.agent.pk, "check_interval": self.agent.check_interval},
        )

        # add check to agent with check interval set
        check = baker.make_recipe(
            "checks.ping_check", agent=self.agent, run_interval=30
        )

        r = self.client.get(url, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(
            r.json(),
            {"agent": self.agent.pk, "check_interval": 30},
        )

        # minimum check run interval is 15 seconds
        check = baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=5)

        r = self.client.get(url, format="json")
        self.assertEqual(r.status_code, 200)
        self.assertEqual(
            r.json(),
            {"agent": self.agent.pk, "check_interval": 15},
        )

    def test_run_checks(self):
        # force run all checks regardless of interval
        agent = baker.make_recipe("agents.online_agent")
        baker.make_recipe("checks.ping_check", agent=agent)
        baker.make_recipe("checks.diskspace_check", agent=agent)
        baker.make_recipe("checks.cpuload_check", agent=agent)
        baker.make_recipe("checks.memory_check", agent=agent)
        baker.make_recipe("checks.eventlog_check", agent=agent)
        for _ in range(10):
            baker.make_recipe("checks.script_check", agent=agent)

        url = f"/api/v3/{agent.agent_id}/runchecks/"
        r = self.client.get(url)
        self.assertEqual(r.json()["agent"], agent.pk)
        self.assertIsInstance(r.json()["check_interval"], int)
        self.assertEqual(len(r.json()["checks"]), 15)

    def test_checkin_patch(self):
        from logs.models import PendingAction

        url = "/api/v3/checkin/"
        agent_updated = baker.make_recipe("agents.agent", version="1.3.0")
        PendingAction.objects.create(
            agent=agent_updated,
            action_type="agentupdate",
            details={
                "url": agent_updated.winagent_dl,
                "version": agent_updated.version,
                "inno": agent_updated.win_inno_exe,
            },
        )
        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
        self.assertEqual(action.status, "pending")

        # test agent failed to update and still on same version
        payload = {
            "func": "hello",
            "agent_id": agent_updated.agent_id,
            "version": "1.3.0",
        }
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
        self.assertEqual(action.status, "pending")

        # test agent successful update
        payload["version"] = settings.LATEST_AGENT_VER
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
        self.assertEqual(action.status, "completed")
        action.delete()

    @patch("apiv3.views.reload_nats")
    def test_agent_recovery(self, reload_nats):
        reload_nats.return_value = "ok"
        r = self.client.get("/api/v3/34jahsdkjasncASDjhg2b3j4r/recover/")
        self.assertEqual(r.status_code, 404)

        agent = baker.make_recipe("agents.online_agent")
        url = f"/api/v3/{agent.agent_id}/recovery/"

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.json(), {"mode": "pass", "shellcmd": ""})
        reload_nats.assert_not_called()

        baker.make("agents.RecoveryAction", agent=agent, mode="mesh")
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.json(), {"mode": "mesh", "shellcmd": ""})
        reload_nats.assert_not_called()

        baker.make(
            "agents.RecoveryAction",
            agent=agent,
            mode="command",
            command="shutdown /r /t 5 /f",
        )
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(
            r.json(), {"mode": "command", "shellcmd": "shutdown /r /t 5 /f"}
        )
        reload_nats.assert_not_called()

        baker.make("agents.RecoveryAction", agent=agent, mode="rpc")
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.json(), {"mode": "rpc", "shellcmd": ""})
        reload_nats.assert_called_once()

    def test_task_runner_get(self):
        from autotasks.serializers import TaskGOGetSerializer

        r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/")
        self.assertEqual(r.status_code, 404)

        # setup data
        agent = baker.make_recipe("agents.agent")
        script = baker.make_recipe("scripts.script")
        task = baker.make("autotasks.AutomatedTask", agent=agent, script=script)

        url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"  # type: ignore

        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(TaskGOGetSerializer(task).data, r.data)  # type: ignore

    def test_task_runner_results(self):
        from agents.models import AgentCustomField

        r = self.client.patch("/api/v3/500/asdf9df9dfdf/taskrunner/")
        self.assertEqual(r.status_code, 404)

        # setup data
        agent = baker.make_recipe("agents.agent")
        task = baker.make("autotasks.AutomatedTask", agent=agent)

        url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"  # type: ignore

        # test passing task
        data = {
            "stdout": "test test \ntestest stdgsd\n",
            "stderr": "",
            "retcode": 0,
            "execution_time": 3.560,
        }

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "passing")  # type: ignore

        # test failing task
        data = {
            "stdout": "test test \ntestest stdgsd\n",
            "stderr": "",
            "retcode": 1,
            "execution_time": 3.560,
        }

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing")  # type: ignore

        # test collector task
        text = baker.make("core.CustomField", model="agent", type="text", name="Test")
        boolean = baker.make(
            "core.CustomField", model="agent", type="checkbox", name="Test1"
        )
        multiple = baker.make(
            "core.CustomField", model="agent", type="multiple", name="Test2"
        )

        # test text fields
        task.custom_field = text  # type: ignore
        task.save()  # type: ignore

        # test failing failing with stderr
        data = {
            "stdout": "test test \nthe last line",
            "stderr": "This is an error",
            "retcode": 1,
            "execution_time": 3.560,
        }

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing")  # type: ignore

        # test saving to text field
        data = {
            "stdout": "test test \nthe last line",
            "stderr": "",
            "retcode": 0,
            "execution_time": 3.560,
        }

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
        self.assertEqual(AgentCustomField.objects.get(field=text, agent=task.agent).value, "the last line")  # type: ignore

        # test saving to checkbox field
        task.custom_field = boolean  # type: ignore
        task.save()  # type: ignore

        data = {
            "stdout": "1",
            "stderr": "",
            "retcode": 0,
            "execution_time": 3.560,
        }

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
        self.assertTrue(AgentCustomField.objects.get(field=boolean, agent=task.agent).value)  # type: ignore

        # test saving to multiple field with commas
        task.custom_field = multiple  # type: ignore
        task.save()  # type: ignore

        data = {
            "stdout": "this,is,an,array",
            "stderr": "",
            "retcode": 0,
            "execution_time": 3.560,
        }

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
        self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this", "is", "an", "array"])  # type: ignore

        # test mutiple with a single value
        data = {
            "stdout": "this",
            "stderr": "",
            "retcode": 0,
            "execution_time": 3.560,
        }

        r = self.client.patch(url, data)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
        self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this"])  # type: ignore
@@ -5,6 +5,7 @@ from . import views
urlpatterns = [
    path("checkrunner/", views.CheckRunner.as_view()),
    path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
    path("<str:agentid>/runchecks/", views.RunChecks.as_view()),
    path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
    path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
    path("meshexe/", views.MeshExe.as_view()),
@@ -17,4 +18,6 @@ urlpatterns = [
    path("choco/", views.Choco.as_view()),
    path("winupdates/", views.WinUpdates.as_view()),
    path("superseded/", views.SupersededWinUpdate.as_view()),
    path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
    path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
]
@@ -15,13 +15,14 @@ from rest_framework.response import Response
from rest_framework.views import APIView

from accounts.models import User
-from agents.models import Agent
from agents.models import Agent, AgentCustomField
from agents.serializers import WinAgentSerializer
from autotasks.models import AutomatedTask
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
from checks.models import Check
from checks.serializers import CheckRunnerGetSerializer
from checks.utils import bytes2human
from logs.models import PendingAction
from software.models import InstalledSoftware
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
from winupdate.models import WinUpdate, WinUpdatePolicy
@@ -35,6 +36,8 @@ class CheckIn(APIView):
    permission_classes = [IsAuthenticated]

    def patch(self, request):
        from alerts.models import Alert

        updated = False
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        if pyver.parse(request.data["version"]) > pyver.parse(
@@ -50,27 +53,29 @@ class CheckIn(APIView):
        # change agent update pending status to completed if agent has just updated
        if (
            updated
-            and agent.pendingactions.filter(
            and agent.pendingactions.filter(  # type: ignore
                action_type="agentupdate", status="pending"
            ).exists()
        ):
-            agent.pendingactions.filter(
            agent.pendingactions.filter(  # type: ignore
                action_type="agentupdate", status="pending"
            ).update(status="completed")

        # handles any alerting actions
-        agent.handle_alert(checkin=True)
-
-        recovery = agent.recoveryactions.filter(last_run=None).last()
-        if recovery is not None:
-            recovery.last_run = djangotime.now()
-            recovery.save(update_fields=["last_run"])
-            handle_agent_recovery_task.delay(pk=recovery.pk)
-            return Response("ok")
-
-        # get any pending actions
-        if agent.pendingactions.filter(status="pending").exists():
-            agent.handle_pending_actions()
        if Alert.objects.filter(agent=agent, resolved=False).exists():
            Alert.handle_alert_resolve(agent)

        # sync scheduled tasks
        if agent.autotasks.exclude(sync_status="synced").exists():  # type: ignore
            tasks = agent.autotasks.exclude(sync_status="synced")  # type: ignore

            for task in tasks:
                if task.sync_status == "pendingdeletion":
                    task.delete_task_on_agent()
                elif task.sync_status == "initial":
                    task.modify_task_on_agent()
                elif task.sync_status == "notsynced":
                    task.create_task_on_agent()

        return Response("ok")
@@ -111,7 +116,7 @@ class CheckIn(APIView):
        if not InstalledSoftware.objects.filter(agent=agent).exists():
            InstalledSoftware(agent=agent, software=sw).save()
        else:
-            s = agent.installedsoftware_set.first()
            s = agent.installedsoftware_set.first()  # type: ignore
            s.software = sw
            s.save(update_fields=["software"])
@@ -184,7 +189,7 @@ class WinUpdates(APIView):
    def patch(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        u = agent.winupdates.filter(guid=request.data["guid"]).last()
        u = agent.winupdates.filter(guid=request.data["guid"]).last()  # type: ignore
        success: bool = request.data["success"]
        if success:
            u.result = "success"
@@ -210,8 +215,8 @@ class WinUpdates(APIView):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        updates = request.data["wua_updates"]
        for update in updates:
-            if agent.winupdates.filter(guid=update["guid"]).exists():
            if agent.winupdates.filter(guid=update["guid"]).exists():  # type: ignore
-                u = agent.winupdates.filter(guid=update["guid"]).last()
                u = agent.winupdates.filter(guid=update["guid"]).last()  # type: ignore
                u.downloaded = update["downloaded"]
                u.installed = update["installed"]
                u.save(update_fields=["downloaded", "installed"])
@@ -242,7 +247,7 @@ class WinUpdates(APIView):
        # more superseded updates cleanup
        if pyver.parse(agent.version) <= pyver.parse("1.4.2"):
-            for u in agent.winupdates.filter(
            for u in agent.winupdates.filter(  # type: ignore
                date_installed__isnull=True, result="failed"
            ).exclude(installed=True):
                u.delete()
@@ -256,25 +261,20 @@ class SupersededWinUpdate(APIView):
    def post(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        updates = agent.winupdates.filter(guid=request.data["guid"])
        updates = agent.winupdates.filter(guid=request.data["guid"])  # type: ignore
        for u in updates:
            u.delete()

        return Response("ok")


-class CheckRunner(APIView):
-    """
-    For the windows golang agent
-    """
class RunChecks(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
        checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)

        ret = {
            "agent": agent.pk,
            "check_interval": agent.check_interval,
@@ -282,13 +282,55 @@ class CheckRunner(APIView):
        }
        return Response(ret)


class CheckRunner(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
        checks = agent.agentchecks.filter(overriden_by_policy=False)  # type: ignore

        run_list = [
            check
            for check in checks
            # always run if check hasn't run yet
            if not check.last_run
            # if a check interval is set, see if the correct amount of seconds have passed
            or (
                check.run_interval
                and (
                    check.last_run
                    < djangotime.now()
                    - djangotime.timedelta(seconds=check.run_interval)
                )
            )
            # if check interval isn't set, make sure the agent's check interval has passed before running
            or (
                not check.run_interval
                and check.last_run
                < djangotime.now() - djangotime.timedelta(seconds=agent.check_interval)
            )
        ]
        ret = {
            "agent": agent.pk,
            "check_interval": agent.check_run_interval(),
            "checks": CheckRunnerGetSerializer(run_list, many=True).data,
        }
        return Response(ret)

    def patch(self, request):
        check = get_object_or_404(Check, pk=request.data["id"])
        if pyver.parse(check.agent.version) < pyver.parse("1.5.7"):
            return notify_error("unsupported")

        check.last_run = djangotime.now()
        check.save(update_fields=["last_run"])
-        status = check.handle_checkv2(request.data)
        status = check.handle_check(request.data)
        if status == "failing" and check.assignedtask.exists():  # type: ignore
            check.handle_assigned_task()

-        return Response(status)
        return Response("ok")


class CheckRunnerInterval(APIView):
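As a side note, the run_list comprehension in the new CheckRunner.get above reduces to a small "is this check due yet" predicate. A standalone sketch with plain datetimes follows; the function and parameter names are illustrative only, not part of the diff:

from datetime import datetime, timedelta
from typing import Optional

def check_is_due(
    last_run: Optional[datetime],
    check_run_interval: Optional[int],
    agent_check_interval: int,
    now: Optional[datetime] = None,
) -> bool:
    # never ran -> always due
    if last_run is None:
        return True
    now = now or datetime.now()
    # a per-check interval takes precedence when set
    if check_run_interval:
        return last_run < now - timedelta(seconds=check_run_interval)
    # otherwise fall back to the agent-wide interval
    return last_run < now - timedelta(seconds=agent_check_interval)

# e.g. a check that last ran 200s ago with a 20s run_interval is due:
print(check_is_due(datetime.now() - timedelta(seconds=200), 20, 120))  # True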
@@ -297,14 +339,13 @@ class CheckRunnerInterval(APIView):
    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
-        return Response({"agent": agent.pk, "check_interval": agent.check_interval})
        return Response(
            {"agent": agent.pk, "check_interval": agent.check_run_interval()}
        )


class TaskRunner(APIView):
-    """
-    For the windows golang agent
-    """
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]
@@ -314,6 +355,7 @@ class TaskRunner(APIView):
        return Response(TaskGOGetSerializer(task).data)

    def patch(self, request, pk, agentid):
        from alerts.models import Alert
        from logs.models import AuditLog

        agent = get_object_or_404(Agent, agent_id=agentid)
@@ -323,10 +365,59 @@ class TaskRunner(APIView):
            instance=task, data=request.data, partial=True
        )
        serializer.is_valid(raise_exception=True)
-        serializer.save(last_run=djangotime.now())
-        new_task = AutomatedTask.objects.get(pk=task.pk)
-        new_task.handle_alert()
        new_task = serializer.save(last_run=djangotime.now())

        # check if task is a collector and update the custom field
        if task.custom_field:
            if not task.stderr:

                if AgentCustomField.objects.filter(
                    field=task.custom_field, agent=task.agent
                ).exists():
                    agent_field = AgentCustomField.objects.get(
                        field=task.custom_field, agent=task.agent
                    )
                else:
                    agent_field = AgentCustomField.objects.create(
                        field=task.custom_field, agent=task.agent
                    )

                # get last line of stdout
                value = (
                    new_task.stdout
                    if task.collector_all_output
                    else new_task.stdout.split("\n")[-1].strip()
                )

                if task.custom_field.type in [
                    "text",
                    "number",
                    "single",
                    "datetime",
                ]:
                    agent_field.string_value = value
                    agent_field.save()
                elif task.custom_field.type == "multiple":
                    agent_field.multiple_value = value.split(",")
                    agent_field.save()
                elif task.custom_field.type == "checkbox":
                    agent_field.bool_value = bool(value)
                    agent_field.save()

                status = "passing"
            else:
                status = "failing"
        else:
            status = "failing" if task.retcode != 0 else "passing"

        new_task.status = status
        new_task.save()

        if status == "passing":
            if Alert.objects.filter(assigned_task=new_task, resolved=False).exists():
                Alert.handle_alert_resolve(new_task)
        else:
            Alert.handle_alert_failure(new_task)

        AuditLog.objects.create(
            username=agent.hostname,
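The collector branch in TaskRunner.patch above maps a task's stdout onto the bound custom field according to the field type. A rough, self-contained sketch of just that mapping; the function name and signature are invented for illustration and are not part of the codebase:

from typing import List, Union

def coerce_collector_value(
    stdout: str, field_type: str, collect_all: bool = False
) -> Union[str, bool, List[str]]:
    # either keep the whole output or only the last line, as above
    value = stdout if collect_all else stdout.split("\n")[-1].strip()
    if field_type in ("text", "number", "single", "datetime"):
        return value              # stored as string_value
    if field_type == "multiple":
        return value.split(",")   # stored as multiple_value
    if field_type == "checkbox":
        return bool(value)        # stored as bool_value
    raise ValueError(f"unknown field type: {field_type}")

print(coerce_collector_value("line1\nthis,is,an,array", "multiple"))
# ['this', 'is', 'an', 'array']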
@@ -356,7 +447,7 @@ class SysInfo(APIView):


class MeshExe(APIView):
-    """ Sends the mesh exe to the installer """
    """Sends the mesh exe to the installer"""

    def post(self, request):
        exe = "meshagent.exe" if request.data["arch"] == "64" else "meshagent-x86.exe"
@@ -404,10 +495,10 @@ class NewAgent(APIView):
        agent.salt_id = f"{agent.hostname}-{agent.pk}"
        agent.save(update_fields=["salt_id"])

-        user = User.objects.create_user(
        user = User.objects.create_user(  # type: ignore
            username=request.data["agent_id"],
            agent=agent,
-            password=User.objects.make_random_password(60),
            password=User.objects.make_random_password(60),  # type: ignore
        )

        token = Token.objects.create(user=user)
@@ -452,7 +543,7 @@ class Software(APIView):
        if not InstalledSoftware.objects.filter(agent=agent).exists():
            InstalledSoftware(agent=agent, software=sw).save()
        else:
-            s = agent.installedsoftware_set.first()
            s = agent.installedsoftware_set.first()  # type: ignore
            s.software = sw
            s.save(update_fields=["software"])
@@ -475,3 +566,59 @@ class Installer(APIView):
        )

        return Response("ok")


class ChocoResult(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def patch(self, request, pk):
        action = get_object_or_404(PendingAction, pk=pk)
        results: str = request.data["results"]

        software_name = action.details["name"].lower()
        success = [
            "install",
            "of",
            software_name,
            "was",
            "successful",
            "installed",
        ]
        duplicate = [software_name, "already", "installed", "--force", "reinstall"]
        installed = False

        if all(x in results.lower() for x in success):
            installed = True
        elif all(x in results.lower() for x in duplicate):
            installed = True

        action.details["output"] = results
        action.details["installed"] = installed
        action.status = "completed"
        action.save(update_fields=["details", "status"])
        return Response("ok")


class AgentRecovery(APIView):
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
        recovery = agent.recoveryactions.filter(last_run=None).last()  # type: ignore
        ret = {"mode": "pass", "shellcmd": ""}
        if recovery is None:
            return Response(ret)

        recovery.last_run = djangotime.now()
        recovery.save(update_fields=["last_run"])

        ret["mode"] = recovery.mode

        if recovery.mode == "command":
            ret["shellcmd"] = recovery.command
        elif recovery.mode == "rpc":
            reload_nats()

        return Response(ret)
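For reference, the ChocoResult handler above decides success by checking that every token from one of two phrase lists appears somewhere in the lowercased output. The same heuristic in isolation, with made-up output strings:

def choco_install_succeeded(results: str, software_name: str) -> bool:
    # every word in either list must appear in the output (case-insensitive)
    success = ["install", "of", software_name, "was", "successful", "installed"]
    duplicate = [software_name, "already", "installed", "--force", "reinstall"]
    out = results.lower()
    return all(x in out for x in success) or all(x in out for x in duplicate)

print(choco_install_succeeded("The install of 7zip was successful. Software installed to ...", "7zip"))  # True
print(choco_install_succeeded("7zip v19.0 already installed. Use --force to reinstall.", "7zip"))        # True
print(choco_install_succeeded("error: package not found", "7zip"))                                       # False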
@@ -0,0 +1,30 @@
# Generated by Django 3.1.7 on 2021-03-02 04:15

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0030_agent_offline_time'),
        ('clients', '0009_auto_20210212_1408'),
        ('automation', '0007_policy_alert_template'),
    ]

    operations = [
        migrations.AddField(
            model_name='policy',
            name='excluded_agents',
            field=models.ManyToManyField(blank=True, related_name='policy_exclusions', to='agents.Agent'),
        ),
        migrations.AddField(
            model_name='policy',
            name='excluded_clients',
            field=models.ManyToManyField(blank=True, related_name='policy_exclusions', to='clients.Client'),
        ),
        migrations.AddField(
            model_name='policy',
            name='excluded_sites',
            field=models.ManyToManyField(blank=True, related_name='policy_exclusions', to='clients.Site'),
        ),
    ]
@@ -17,9 +17,19 @@ class Policy(BaseAuditModel):
        null=True,
        blank=True,
    )
    excluded_sites = models.ManyToManyField(
        "clients.Site", related_name="policy_exclusions", blank=True
    )
    excluded_clients = models.ManyToManyField(
        "clients.Client", related_name="policy_exclusions", blank=True
    )
    excluded_agents = models.ManyToManyField(
        "agents.Agent", related_name="policy_exclusions", blank=True
    )

    def save(self, *args, **kwargs):
-        from automation.tasks import generate_agent_checks_from_policies_task
        from alerts.tasks import cache_agents_alert_template
        from automation.tasks import generate_agent_checks_task

        # get old policy if exists
        old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
@@ -28,51 +38,85 @@ class Policy(BaseAuditModel):
        # generate agent checks only if active and enforced were changed
        if old_policy:
            if old_policy.active != self.active or old_policy.enforced != self.enforced:
-                generate_agent_checks_from_policies_task.delay(
-                    policypk=self.pk,
                generate_agent_checks_task.delay(
                    policy=self.pk,
                    create_tasks=True,
                )

            if old_policy.alert_template != self.alert_template:
                cache_agents_alert_template.delay()

    def delete(self, *args, **kwargs):
        from automation.tasks import generate_agent_checks_task

        agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
        super(BaseAuditModel, self).delete(*args, **kwargs)

-        generate_agent_checks_task.delay(agents, create_tasks=True)
        generate_agent_checks_task.delay(agents=agents, create_tasks=True)

-    @property
-    def is_default_server_policy(self):
-        return self.default_server_policy.exists()
-
-    @property
-    def is_default_workstation_policy(self):
-        return self.default_workstation_policy.exists()

    def __str__(self):
        return self.name

    @property
    def is_default_server_policy(self):
        return self.default_server_policy.exists()  # type: ignore

    @property
    def is_default_workstation_policy(self):
        return self.default_workstation_policy.exists()  # type: ignore

    def is_agent_excluded(self, agent):
        return (
            agent in self.excluded_agents.all()
            or agent.site in self.excluded_sites.all()
            or agent.client in self.excluded_clients.all()
        )

    def related_agents(self):
        return self.get_related("server") | self.get_related("workstation")

    def get_related(self, mon_type):
-        explicit_agents = self.agents.filter(monitoring_type=mon_type)
-        explicit_clients = getattr(self, f"{mon_type}_clients").all()
-        explicit_sites = getattr(self, f"{mon_type}_sites").all()
        explicit_agents = (
            self.agents.filter(monitoring_type=mon_type)  # type: ignore
            .exclude(
                pk__in=self.excluded_agents.only("pk").values_list("pk", flat=True)
            )
            .exclude(site__in=self.excluded_sites.all())
            .exclude(site__client__in=self.excluded_clients.all())
        )

        explicit_clients = getattr(self, f"{mon_type}_clients").exclude(
            pk__in=self.excluded_clients.all()
        )
        explicit_sites = getattr(self, f"{mon_type}_sites").exclude(
            pk__in=self.excluded_sites.all()
        )

        filtered_agents_pks = Policy.objects.none()

-        filtered_agents_pks |= Agent.objects.filter(
-            site__in=[
-                site for site in explicit_sites if site.client not in explicit_clients
-            ],
-            monitoring_type=mon_type,
-        ).values_list("pk", flat=True)
        filtered_agents_pks |= (
            Agent.objects.exclude(block_policy_inheritance=True)
            .filter(
                site__in=[
                    site
                    for site in explicit_sites
                    if site.client not in explicit_clients
                    and site.client not in self.excluded_clients.all()
                ],
                monitoring_type=mon_type,
            )
            .values_list("pk", flat=True)
        )

-        filtered_agents_pks |= Agent.objects.filter(
-            site__client__in=[client for client in explicit_clients],
-            monitoring_type=mon_type,
-        ).values_list("pk", flat=True)
        filtered_agents_pks |= (
            Agent.objects.exclude(block_policy_inheritance=True)
            .exclude(site__block_policy_inheritance=True)
            .filter(
                site__client__in=[client for client in explicit_clients],
                monitoring_type=mon_type,
            )
            .values_list("pk", flat=True)
        )

        return Agent.objects.filter(
            models.Q(pk__in=filtered_agents_pks)
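Taken together, the new fields and helpers above mean a policy is skipped for an agent whenever the agent, its site, or its client is on the policy's exclusion lists, or when inheritance is blocked further down the chain. A condensed sketch over plain stand-in objects (not the real Django models; names here are illustrative only):

from dataclasses import dataclass, field
from typing import List

# Minimal stand-ins for the real models, for illustration only.
@dataclass
class AgentStub:
    hostname: str
    site: str
    client: str
    block_policy_inheritance: bool = False

@dataclass
class PolicyStub:
    active: bool = True
    excluded_agents: List[str] = field(default_factory=list)   # hostnames
    excluded_sites: List[str] = field(default_factory=list)
    excluded_clients: List[str] = field(default_factory=list)

def policy_applies(policy: PolicyStub, agent: AgentStub) -> bool:
    # mirrors Policy.is_agent_excluded() plus the active/block_policy_inheritance checks
    excluded = (
        agent.hostname in policy.excluded_agents
        or agent.site in policy.excluded_sites
        or agent.client in policy.excluded_clients
    )
    return policy.active and not excluded and not agent.block_policy_inheritance

agent = AgentStub("PC-01", site="HQ", client="Acme")
print(policy_applies(PolicyStub(excluded_sites=["HQ"]), agent))  # False
print(policy_applies(PolicyStub(), agent))                       # True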
@@ -88,9 +132,6 @@ class Policy(BaseAuditModel):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def cascade_policy_tasks(agent):
|
def cascade_policy_tasks(agent):
|
||||||
from autotasks.models import AutomatedTask
|
|
||||||
from autotasks.tasks import delete_win_task_schedule
|
|
||||||
from logs.models import PendingAction
|
|
||||||
|
|
||||||
# List of all tasks to be applied
|
# List of all tasks to be applied
|
||||||
tasks = list()
|
tasks = list()
|
||||||
@@ -119,23 +160,50 @@ class Policy(BaseAuditModel):
|
|||||||
client_policy = client.workstation_policy
|
client_policy = client.workstation_policy
|
||||||
site_policy = site.workstation_policy
|
site_policy = site.workstation_policy
|
||||||
|
|
||||||
if agent_policy and agent_policy.active:
|
# check if client/site/agent is blocking inheritance and blank out policies
|
||||||
|
if agent.block_policy_inheritance:
|
||||||
|
site_policy = None
|
||||||
|
client_policy = None
|
||||||
|
default_policy = None
|
||||||
|
elif site.block_policy_inheritance:
|
||||||
|
client_policy = None
|
||||||
|
default_policy = None
|
||||||
|
elif client.block_policy_inheritance:
|
||||||
|
default_policy = None
|
||||||
|
|
||||||
|
if (
|
||||||
|
agent_policy
|
||||||
|
and agent_policy.active
|
||||||
|
and not agent_policy.is_agent_excluded(agent)
|
||||||
|
):
|
||||||
for task in agent_policy.autotasks.all():
|
for task in agent_policy.autotasks.all():
|
||||||
if task.pk not in added_task_pks:
|
if task.pk not in added_task_pks:
|
||||||
tasks.append(task)
|
tasks.append(task)
|
||||||
added_task_pks.append(task.pk)
|
added_task_pks.append(task.pk)
|
||||||
if site_policy and site_policy.active:
|
if (
|
||||||
|
site_policy
|
||||||
|
and site_policy.active
|
||||||
|
and not site_policy.is_agent_excluded(agent)
|
||||||
|
):
|
||||||
for task in site_policy.autotasks.all():
|
for task in site_policy.autotasks.all():
|
||||||
if task.pk not in added_task_pks:
|
if task.pk not in added_task_pks:
|
||||||
tasks.append(task)
|
tasks.append(task)
|
||||||
added_task_pks.append(task.pk)
|
added_task_pks.append(task.pk)
|
||||||
if client_policy and client_policy.active:
|
if (
|
||||||
|
client_policy
|
||||||
|
and client_policy.active
|
||||||
|
and not client_policy.is_agent_excluded(agent)
|
||||||
|
):
|
||||||
for task in client_policy.autotasks.all():
|
for task in client_policy.autotasks.all():
|
||||||
if task.pk not in added_task_pks:
|
if task.pk not in added_task_pks:
|
||||||
tasks.append(task)
|
tasks.append(task)
|
||||||
added_task_pks.append(task.pk)
|
added_task_pks.append(task.pk)
|
||||||
|
|
||||||
if default_policy and default_policy.active:
|
if (
|
||||||
|
default_policy
|
||||||
|
and default_policy.active
|
||||||
|
and not default_policy.is_agent_excluded(agent)
|
||||||
|
):
|
||||||
for task in default_policy.autotasks.all():
|
for task in default_policy.autotasks.all():
|
||||||
if task.pk not in added_task_pks:
|
if task.pk not in added_task_pks:
|
||||||
tasks.append(task)
|
tasks.append(task)
|
||||||
@@ -149,26 +217,16 @@ class Policy(BaseAuditModel):
|
|||||||
if taskpk not in added_task_pks
|
if taskpk not in added_task_pks
|
||||||
]
|
]
|
||||||
):
|
):
|
||||||
delete_win_task_schedule.delay(task.pk)
|
if task.sync_status == "initial":
|
||||||
|
task.delete()
|
||||||
|
else:
|
||||||
|
task.sync_status = "pendingdeletion"
|
||||||
|
task.save()
|
||||||
|
|
||||||
# handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
|
# change tasks from pendingdeletion to notsynced if policy was added or changed
|
||||||
for action in agent.pendingactions.filter(action_type="taskaction").exclude(
|
agent.autotasks.filter(sync_status="pendingdeletion").filter(
|
||||||
status="completed"
|
parent_task__in=[taskpk for taskpk in added_task_pks]
|
||||||
):
|
).update(sync_status="notsynced")
|
||||||
task = AutomatedTask.objects.get(pk=action.details["task_id"])
|
|
||||||
if (
|
|
||||||
task.parent_task in agent_tasks_parent_pks
|
|
||||||
and task.parent_task in added_task_pks
|
|
||||||
):
|
|
||||||
agent.remove_matching_pending_task_actions(task.id)
|
|
||||||
|
|
||||||
PendingAction(
|
|
||||||
agent=agent,
|
|
||||||
action_type="taskaction",
|
|
||||||
details={"action": "taskcreate", "task_id": task.id},
|
|
||||||
).save()
|
|
||||||
task.sync_status = "notsynced"
|
|
||||||
task.save(update_fields=["sync_status"])
|
|
||||||
|
|
||||||
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
||||||
|
|
||||||
@@ -200,12 +258,27 @@ class Policy(BaseAuditModel):
|
|||||||
client_policy = client.workstation_policy
|
client_policy = client.workstation_policy
|
||||||
site_policy = site.workstation_policy
|
site_policy = site.workstation_policy
|
||||||
|
|
||||||
|
# check if client/site/agent is blocking inheritance and blank out policies
|
||||||
|
if agent.block_policy_inheritance:
|
||||||
|
site_policy = None
|
||||||
|
client_policy = None
|
||||||
|
default_policy = None
|
||||||
|
elif site.block_policy_inheritance:
|
||||||
|
client_policy = None
|
||||||
|
default_policy = None
|
||||||
|
elif client.block_policy_inheritance:
|
||||||
|
default_policy = None
|
||||||
|
|
||||||
# Used to hold the policies that will be applied and the order in which they are applied
|
# Used to hold the policies that will be applied and the order in which they are applied
|
||||||
# Enforced policies are applied first
|
# Enforced policies are applied first
|
||||||
enforced_checks = list()
|
enforced_checks = list()
|
||||||
policy_checks = list()
|
policy_checks = list()
|
||||||
|
|
||||||
if agent_policy and agent_policy.active:
|
if (
|
||||||
|
agent_policy
|
||||||
|
and agent_policy.active
|
||||||
|
and not agent_policy.is_agent_excluded(agent)
|
||||||
|
):
|
||||||
if agent_policy.enforced:
|
if agent_policy.enforced:
|
||||||
for check in agent_policy.policychecks.all():
|
for check in agent_policy.policychecks.all():
|
||||||
enforced_checks.append(check)
|
enforced_checks.append(check)
|
||||||
@@ -213,7 +286,11 @@ class Policy(BaseAuditModel):
|
|||||||
for check in agent_policy.policychecks.all():
|
for check in agent_policy.policychecks.all():
|
||||||
policy_checks.append(check)
|
policy_checks.append(check)
|
||||||
|
|
||||||
if site_policy and site_policy.active:
|
if (
|
||||||
|
site_policy
|
||||||
|
and site_policy.active
|
||||||
|
and not site_policy.is_agent_excluded(agent)
|
||||||
|
):
|
||||||
if site_policy.enforced:
|
if site_policy.enforced:
|
||||||
for check in site_policy.policychecks.all():
|
for check in site_policy.policychecks.all():
|
||||||
enforced_checks.append(check)
|
enforced_checks.append(check)
|
||||||
@@ -221,7 +298,11 @@ class Policy(BaseAuditModel):
                 for check in site_policy.policychecks.all():
                     policy_checks.append(check)

-        if client_policy and client_policy.active:
+        if (
+            client_policy
+            and client_policy.active
+            and not client_policy.is_agent_excluded(agent)
+        ):
             if client_policy.enforced:
                 for check in client_policy.policychecks.all():
                     enforced_checks.append(check)
@@ -229,7 +310,11 @@ class Policy(BaseAuditModel):
                 for check in client_policy.policychecks.all():
                     policy_checks.append(check)

-        if default_policy and default_policy.active:
+        if (
+            default_policy
+            and default_policy.active
+            and not default_policy.is_agent_excluded(agent)
+        ):
             if default_policy.enforced:
                 for check in default_policy.policychecks.all():
                     enforced_checks.append(check)
@@ -345,11 +430,12 @@ class Policy(BaseAuditModel):

         # remove policy checks from agent that fell out of policy scope
         agent.agentchecks.filter(
+            managed_by_policy=True,
             parent_check__in=[
                 checkpk
                 for checkpk in agent_checks_parent_pks
                 if checkpk not in [check.pk for check in final_list]
-            ]
+            ],
         ).delete()

         return [
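Note: a standalone sketch of the resolution order the hunks above implement; the helper name and return shape are illustrative, not part of the PR.

# Sketch only: mirrors the block_policy_inheritance plus is_agent_excluded handling above.
def resolve_policies(agent, site, client, default_policy):
    agent_policy = agent.policy
    site_policy = site.workstation_policy
    client_policy = client.workstation_policy

    if agent.block_policy_inheritance:
        site_policy = client_policy = default_policy = None
    elif site.block_policy_inheritance:
        client_policy = default_policy = None
    elif client.block_policy_inheritance:
        default_policy = None

    # drop any policy that explicitly excludes this agent
    return [
        p
        for p in (agent_policy, site_policy, client_policy, default_policy)
        if p and p.active and not p.is_agent_excluded(agent)
    ]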
api/tacticalrmm/automation/permissions.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+from rest_framework import permissions
+
+from tacticalrmm.permissions import _has_perm
+
+
+class AutomationPolicyPerms(permissions.BasePermission):
+    def has_permission(self, r, view):
+        if r.method == "GET":
+            return True
+
+        return _has_perm(r, "can_manage_automation_policies")
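Note: _has_perm is imported from tacticalrmm.permissions but its body is not shown in this diff; the sketch below is only an assumption about its likely shape (a boolean flag looked up on the requesting user's role).

# Assumption: not part of this diff; one plausible implementation of the imported helper.
def _has_perm(request, perm: str) -> bool:
    if request.user.is_superuser:
        return True

    role = getattr(request.user, "role", None)
    return bool(role and getattr(role, perm, False))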
@@ -4,9 +4,11 @@ from rest_framework.serializers import (
     SerializerMethodField,
 )

+from agents.serializers import AgentHostnameSerializer
 from autotasks.models import AutomatedTask
 from checks.models import Check
 from clients.models import Client
+from clients.serializers import ClientSerializer, SiteSerializer
 from winupdate.serializers import WinUpdatePolicySerializer

 from .models import Policy
@@ -25,6 +27,9 @@ class PolicyTableSerializer(ModelSerializer):
     agents_count = SerializerMethodField(read_only=True)
     winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
     alert_template = ReadOnlyField(source="alert_template.id")
+    excluded_clients = ClientSerializer(many=True)
+    excluded_sites = SiteSerializer(many=True)
+    excluded_agents = AgentHostnameSerializer(many=True)

     class Meta:
         model = Policy
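Note: with the three nested serializers added above, the policy table payload embeds the excluded objects instead of bare primary keys. Illustrative shape only; the values below are made up.

# Illustrative output only; field values are invented.
{
    "id": 1,
    "name": "Default Workstation Policy",
    "excluded_clients": [{"id": 3, "name": "Example Client"}],
    "excluded_sites": [],
    "excluded_agents": [{"id": 42, "hostname": "WS-042"}],
}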
@@ -1,167 +1,153 @@
-from agents.models import Agent
-from automation.models import Policy
-from autotasks.models import AutomatedTask
-from checks.models import Check
+from typing import Any, Dict, List, Union
+
 from tacticalrmm.celery import app


-@app.task
-# generates policy checks on agents affected by a policy and optionally generate automated tasks
-def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
-
-    policy = Policy.objects.get(pk=policypk)
-
-    if policy.is_default_server_policy and policy.is_default_workstation_policy:
-        agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
-    elif policy.is_default_server_policy:
-        agents = Agent.objects.filter(monitoring_type="server").only(
-            "pk", "monitoring_type"
-        )
-    elif policy.is_default_workstation_policy:
-        agents = Agent.objects.filter(monitoring_type="workstation").only(
+@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
+def generate_agent_checks_task(
+    policy: int = None,
+    site: int = None,
+    client: int = None,
+    agents: List[int] = list(),
+    all: bool = False,
+    create_tasks: bool = False,
+) -> Union[str, None]:
+    from agents.models import Agent
+    from automation.models import Policy
+
+    p = Policy.objects.get(pk=policy) if policy else None
+
+    # generate checks on all agents if all is specified or if policy is default server/workstation policy
+    if (p and p.is_default_server_policy and p.is_default_workstation_policy) or all:
+        a = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
+
+    # generate checks on all servers if policy is a default servers policy
+    elif p and p.is_default_server_policy:
+        a = Agent.objects.filter(monitoring_type="server").only("pk", "monitoring_type")
+
+    # generate checks on all workstations if policy is a default workstations policy
+    elif p and p.is_default_workstation_policy:
+        a = Agent.objects.filter(monitoring_type="workstation").only(
             "pk", "monitoring_type"
         )
+
+    # generate checks on a list of supplied agents
+    elif agents:
+        a = Agent.objects.filter(pk__in=agents)
+
+    # generate checks on agents affected by supplied policy
+    elif policy:
+        a = p.related_agents().only("pk")
+
+    # generate checks that has specified site
+    elif site:
+        a = Agent.objects.filter(site_id=site)
+
+    # generate checks that has specified client
+    elif client:
+        a = Agent.objects.filter(site__client_id=client)
     else:
-        agents = policy.related_agents().only("pk")
+        a = []

-    for agent in agents:
+    for agent in a:
         agent.generate_checks_from_policies()
         if create_tasks:
             agent.generate_tasks_from_policies()

+    return "ok"
+

-@app.task
-# generates policy checks on a list of agents and optionally generate automated tasks
-def generate_agent_checks_task(agentpks, create_tasks=False):
-    for agent in Agent.objects.filter(pk__in=agentpks):
-        agent.generate_checks_from_policies()
-
-        if create_tasks:
-            agent.generate_tasks_from_policies()
-
-
-@app.task
-# generates policy checks on agent servers or workstations within a certain client or site and optionally generate automated tasks
-def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):
-
-    for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
-        agent.generate_checks_from_policies()
-
-        if create_tasks:
-            agent.generate_tasks_from_policies()
-
-
-@app.task
-# generates policy checks on all agent servers or workstations and optionally generate automated tasks
-def generate_all_agent_checks_task(mon_type, create_tasks=False):
-    for agent in Agent.objects.filter(monitoring_type=mon_type):
-        agent.generate_checks_from_policies()
-
-        if create_tasks:
-            agent.generate_tasks_from_policies()
-
-
-@app.task
-# deletes a policy managed check from all agents
-def delete_policy_check_task(checkpk):
-
-    Check.objects.filter(parent_check=checkpk).delete()
-
-
-@app.task
+@app.task(
+    acks_late=True, retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5}
+)
 # updates policy managed check fields on agents
-def update_policy_check_fields_task(checkpk):
-
-    check = Check.objects.get(pk=checkpk)
-
-    Check.objects.filter(parent_check=checkpk).update(
-        warning_threshold=check.warning_threshold,
-        error_threshold=check.error_threshold,
-        alert_severity=check.alert_severity,
-        name=check.name,
-        disk=check.disk,
-        fails_b4_alert=check.fails_b4_alert,
-        ip=check.ip,
-        script=check.script,
-        script_args=check.script_args,
-        info_return_codes=check.info_return_codes,
-        warning_return_codes=check.warning_return_codes,
-        timeout=check.timeout,
-        pass_if_start_pending=check.pass_if_start_pending,
-        pass_if_svc_not_exist=check.pass_if_svc_not_exist,
-        restart_if_stopped=check.restart_if_stopped,
-        log_name=check.log_name,
-        event_id=check.event_id,
-        event_id_is_wildcard=check.event_id_is_wildcard,
-        event_type=check.event_type,
-        event_source=check.event_source,
-        event_message=check.event_message,
-        fail_when=check.fail_when,
-        search_last_days=check.search_last_days,
-        email_alert=check.email_alert,
-        text_alert=check.text_alert,
-        dashboard_alert=check.dashboard_alert,
-    )
+def update_policy_check_fields_task(check: int) -> str:
+    from checks.models import Check
+
+    c: Check = Check.objects.get(pk=check)
+    update_fields: Dict[Any, Any] = {}
+
+    for field in c.policy_fields_to_copy:
+        update_fields[field] = getattr(c, field)
+
+    Check.objects.filter(parent_check=check).update(**update_fields)
+
+    return "ok"


-@app.task
+@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
 # generates policy tasks on agents affected by a policy
-def generate_agent_tasks_from_policies_task(policypk):
-
-    policy = Policy.objects.get(pk=policypk)
-
-    if policy.is_default_server_policy and policy.is_default_workstation_policy:
+def generate_agent_autotasks_task(policy: int = None) -> str:
+    from agents.models import Agent
+    from automation.models import Policy
+
+    p: Policy = Policy.objects.get(pk=policy)
+
+    if p and p.is_default_server_policy and p.is_default_workstation_policy:
         agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
-    elif policy.is_default_server_policy:
+    elif p and p.is_default_server_policy:
         agents = Agent.objects.filter(monitoring_type="server").only(
             "pk", "monitoring_type"
         )
-    elif policy.is_default_workstation_policy:
+    elif p and p.is_default_workstation_policy:
         agents = Agent.objects.filter(monitoring_type="workstation").only(
             "pk", "monitoring_type"
         )
     else:
-        agents = policy.related_agents().only("pk")
+        agents = p.related_agents().only("pk")

     for agent in agents:
         agent.generate_tasks_from_policies()

+    return "ok"
+

-@app.task
-def delete_policy_autotask_task(taskpk):
+@app.task(
+    acks_late=True,
+    retry_backoff=5,
+    retry_jitter=True,
+    retry_kwargs={"max_retries": 5},
+)
+def delete_policy_autotasks_task(task: int) -> str:
     from autotasks.models import AutomatedTask
-    from autotasks.tasks import delete_win_task_schedule

-    for task in AutomatedTask.objects.filter(parent_task=taskpk):
-        delete_win_task_schedule.delay(task.pk)
+    for t in AutomatedTask.objects.filter(parent_task=task):
+        t.delete_task_on_agent()
+
+    return "ok"


 @app.task
-def run_win_policy_autotask_task(task_pks):
-    from autotasks.tasks import run_win_task
+def run_win_policy_autotasks_task(task: int) -> str:
+    from autotasks.models import AutomatedTask

-    for task in task_pks:
-        run_win_task.delay(task)
+    for t in AutomatedTask.objects.filter(parent_task=task):
+        t.run_win_task()
+
+    return "ok"


-@app.task
-def update_policy_task_fields_task(taskpk, update_agent=False):
-    from autotasks.tasks import enable_or_disable_win_task
-
-    task = AutomatedTask.objects.get(pk=taskpk)
-
-    AutomatedTask.objects.filter(parent_task=taskpk).update(
-        alert_severity=task.alert_severity,
-        email_alert=task.email_alert,
-        text_alert=task.text_alert,
-        dashboard_alert=task.dashboard_alert,
-        script=task.script,
-        script_args=task.script_args,
-        name=task.name,
-        timeout=task.timeout,
-        enabled=task.enabled,
-    )
+@app.task(
+    acks_late=True,
+    retry_backoff=5,
+    retry_jitter=True,
+    retry_kwargs={"max_retries": 5},
+)
+def update_policy_autotasks_fields_task(task: int, update_agent: bool = False) -> str:
+    from autotasks.models import AutomatedTask
+
+    t = AutomatedTask.objects.get(pk=task)
+    update_fields: Dict[str, Any] = {}
+
+    for field in t.policy_fields_to_copy:
+        update_fields[field] = getattr(t, field)
+
+    AutomatedTask.objects.filter(parent_task=task).update(**update_fields)

     if update_agent:
-        for task in AutomatedTask.objects.filter(parent_task=taskpk):
-            enable_or_disable_win_task.delay(task.pk, task.enabled)
+        for t in AutomatedTask.objects.filter(parent_task=task).exclude(
+            sync_status="initial"
+        ):
+            t.modify_task_on_agent()
+
+    return "ok"
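Note: with the single-purpose tasks folded into generate_agent_checks_task, callers pick the scope through keyword arguments. A few illustrative invocations (the pk values are made up):

# Illustrative calls only; primary keys are invented.
from automation.tasks import generate_agent_checks_task

generate_agent_checks_task.delay(policy=3, create_tasks=True)         # agents affected by one policy
generate_agent_checks_task.delay(client=7)                            # every agent under a client
generate_agent_checks_task.delay(agents=[12, 15], create_tasks=True)  # an explicit list of agent pks
generate_agent_checks_task.delay(all=True)                            # the whole fleet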
(File diff suppressed because it is too large.)
@@ -7,6 +7,7 @@ urlpatterns = [
     path("policies/<int:pk>/related/", views.GetRelated.as_view()),
     path("policies/overview/", views.OverviewPolicy.as_view()),
     path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
+    path("sync/", views.PolicySync.as_view()),
     path("<int:pk>/policychecks/", views.PolicyCheck.as_view()),
     path("<int:pk>/policyautomatedtasks/", views.PolicyAutoTask.as_view()),
     path("policycheckstatus/<int:check>/check/", views.PolicyCheck.as_view()),
@@ -1,17 +1,19 @@
-from django.shortcuts import get_object_or_404
-from rest_framework.response import Response
-from rest_framework.views import APIView
-
 from agents.models import Agent
 from agents.serializers import AgentHostnameSerializer
 from autotasks.models import AutomatedTask
 from checks.models import Check
 from clients.models import Client
 from clients.serializers import ClientSerializer, SiteSerializer
+from django.shortcuts import get_object_or_404
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from tacticalrmm.utils import notify_error
 from winupdate.models import WinUpdatePolicy
 from winupdate.serializers import WinUpdatePolicySerializer

 from .models import Policy
+from .permissions import AutomationPolicyPerms
 from .serializers import (
     AutoTasksFieldSerializer,
     PolicyCheckSerializer,
@@ -21,10 +23,11 @@ from .serializers import (
     PolicyTableSerializer,
     PolicyTaskStatusSerializer,
 )
-from .tasks import run_win_policy_autotask_task


 class GetAddPolicies(APIView):
+    permission_classes = [IsAuthenticated, AutomationPolicyPerms]
+
     def get(self, request):
         policies = Policy.objects.all()
@@ -52,18 +55,30 @@ class GetAddPolicies(APIView):


 class GetUpdateDeletePolicy(APIView):
+    permission_classes = [IsAuthenticated, AutomationPolicyPerms]
+
     def get(self, request, pk):
         policy = get_object_or_404(Policy, pk=pk)

         return Response(PolicySerializer(policy).data)

     def put(self, request, pk):
+        from .tasks import generate_agent_checks_task
+
         policy = get_object_or_404(Policy, pk=pk)

         serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
         serializer.save()

+        # check for excluding objects and in the request and if present generate policies
+        if (
+            "excluded_sites" in request.data.keys()
+            or "excluded_clients" in request.data.keys()
+            or "excluded_agents" in request.data.keys()
+        ):
+            generate_agent_checks_task.delay(policy=pk, create_tasks=True)
+
         return Response("ok")

     def delete(self, request, pk):
@@ -72,8 +87,22 @@ class GetUpdateDeletePolicy(APIView):
         return Response("ok")


-class PolicyAutoTask(APIView):
+class PolicySync(APIView):
+    def post(self, request):
+        if "policy" in request.data.keys():
+            from automation.tasks import generate_agent_checks_task
+
+            generate_agent_checks_task.delay(
+                policy=request.data["policy"], create_tasks=True
+            )
+            return Response("ok")
+
+        else:
+            return notify_error("The request was invalid")
+
+
+class PolicyAutoTask(APIView):
+    permission_classes = [IsAuthenticated, AutomationPolicyPerms]
     # tasks associated with policy
     def get(self, request, pk):
         tasks = AutomatedTask.objects.filter(policy=pk)
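Note: a quick sketch of exercising the new PolicySync endpoint from a script; the URL prefix and token header are assumptions, not shown in this diff.

# Sketch only: assumes the automation app is mounted under /automation/ and token auth is configured.
import requests

resp = requests.post(
    "https://rmm.example.com/automation/sync/",
    json={"policy": 3},
    headers={"Authorization": "Token <api-token>"},
)
print(resp.json())  # "ok" when the resync task was queued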
@@ -86,12 +115,15 @@ class PolicyAutoTask(APIView):

     # bulk run win tasks associated with policy
     def put(self, request, task):
-        tasks = AutomatedTask.objects.filter(parent_task=task)
-        run_win_policy_autotask_task.delay([task.id for task in tasks])
+        from .tasks import run_win_policy_autotasks_task
+
+        run_win_policy_autotasks_task.delay(task=task)
         return Response("Affected agent tasks will run shortly")


 class PolicyCheck(APIView):
+    permission_classes = [IsAuthenticated, AutomationPolicyPerms]
+
     def get(self, request, pk):
         checks = Check.objects.filter(policy__pk=pk, agent=None)
         return Response(PolicyCheckSerializer(checks, many=True).data)
@@ -164,14 +196,14 @@ class GetRelated(APIView):


 class UpdatePatchPolicy(APIView):
+    permission_classes = [IsAuthenticated, AutomationPolicyPerms]
     # create new patch policy
     def post(self, request):
         policy = get_object_or_404(Policy, pk=request.data["policy"])

         serializer = WinUpdatePolicySerializer(data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
-        serializer.policy = policy
+        serializer.policy = policy  # type: ignore
         serializer.save()

         return Response("ok")
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-02-24 05:37
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0017_auto_20210210_1512'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='automatedtask',
+            name='run_asap_after_missed',
+            field=models.BooleanField(default=False),
+        ),
+    ]
@@ -0,0 +1,31 @@
+# Generated by Django 3.1.7 on 2021-04-04 00:32
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0019_globalkvstore'),
+        ('scripts', '0007_script_args'),
+        ('autotasks', '0018_automatedtask_run_asap_after_missed'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='automatedtask',
+            name='custom_field',
+            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autotask', to='core.customfield'),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='retvalue',
+            field=models.TextField(blank=True, null=True),
+        ),
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='script',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autoscript', to='scripts.script'),
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-04-21 02:26
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0019_auto_20210404_0032'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='sync_status',
+            field=models.CharField(choices=[('synced', 'Synced With Agent'), ('notsynced', 'Waiting On Agent Checkin'), ('pendingdeletion', 'Pending Deletion on Agent'), ('initial', 'Initial Task Sync')], default='initial', max_length=100),
+        ),
+    ]
@@ -0,0 +1,20 @@
+# Generated by Django 3.1.7 on 2021-04-27 14:11
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('core', '0021_customfield_hide_in_ui'),
+        ('autotasks', '0020_auto_20210421_0226'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='custom_field',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autotasks', to='core.customfield'),
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.1 on 2021-05-29 03:26
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0021_alter_automatedtask_custom_field'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='automatedtask',
+            name='collector_all_output',
+            field=models.BooleanField(default=False),
+        ),
+    ]
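Note: the five migrations above accompany the AutomatedTask model changes; a minimal sketch of applying them programmatically, assuming it runs inside the configured Django project:

# Sketch only: apply the new autotasks migrations from a deploy or management script.
from django.core.management import call_command

call_command("migrate", "autotasks", interactive=False)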
@@ -1,17 +1,20 @@
+import asyncio
 import datetime as dt
 import random
 import string
+from typing import List

 import pytz
+from alerts.models import SEVERITY_CHOICES
 from django.conf import settings
 from django.contrib.postgres.fields import ArrayField
 from django.db import models
 from django.db.models.fields import DateTimeField
+from django.db.utils import DatabaseError
 from django.utils import timezone as djangotime
-from loguru import logger

-from alerts.models import SEVERITY_CHOICES
 from logs.models import BaseAuditModel
+from loguru import logger
+from packaging import version as pyver
 from tacticalrmm.utils import bitdays_to_string

 logger.configure(**settings.LOG_CONFIG)
@@ -37,6 +40,7 @@ SYNC_STATUS_CHOICES = [
     ("synced", "Synced With Agent"),
     ("notsynced", "Waiting On Agent Checkin"),
     ("pendingdeletion", "Pending Deletion on Agent"),
+    ("initial", "Initial Task Sync"),
 ]

 TASK_STATUS_CHOICES = [
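Note: a small reading aid for how the four sync_status values are used by the agent-sync methods later in this diff; the helper name is illustrative, not part of the PR.

# Reading aid only: "initial" = never pushed to the agent yet; "notsynced" and
# "pendingdeletion" = a create/modify/delete attempt failed and is retried on checkin.
def needs_agent_sync(task) -> bool:
    return task.sync_status in ("initial", "notsynced", "pendingdeletion")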
@@ -61,12 +65,19 @@ class AutomatedTask(BaseAuditModel):
         blank=True,
         on_delete=models.CASCADE,
     )
+    custom_field = models.ForeignKey(
+        "core.CustomField",
+        related_name="autotasks",
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+    )
     script = models.ForeignKey(
         "scripts.Script",
         null=True,
         blank=True,
         related_name="autoscript",
-        on_delete=models.CASCADE,
+        on_delete=models.SET_NULL,
     )
     script_args = ArrayField(
         models.CharField(max_length=255, null=True, blank=True),
@@ -94,12 +105,15 @@ class AutomatedTask(BaseAuditModel):
     task_type = models.CharField(
         max_length=100, choices=TASK_TYPE_CHOICES, default="manual"
     )
+    collector_all_output = models.BooleanField(default=False)
     run_time_date = DateTimeField(null=True, blank=True)
     remove_if_not_scheduled = models.BooleanField(default=False)
+    run_asap_after_missed = models.BooleanField(default=False)  # added in agent v1.4.7
     managed_by_policy = models.BooleanField(default=False)
     parent_task = models.PositiveIntegerField(null=True, blank=True)
     win_task_name = models.CharField(max_length=255, null=True, blank=True)
     timeout = models.PositiveIntegerField(default=120)
+    retvalue = models.TextField(null=True, blank=True)
     retcode = models.IntegerField(null=True, blank=True)
     stdout = models.TextField(null=True, blank=True)
     stderr = models.TextField(null=True, blank=True)
@@ -110,7 +124,7 @@ class AutomatedTask(BaseAuditModel):
         max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
     )
     sync_status = models.CharField(
-        max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced"
+        max_length=100, choices=SYNC_STATUS_CHOICES, default="initial"
     )
     alert_severity = models.CharField(
         max_length=30, choices=SEVERITY_CHOICES, default="info"
@@ -147,6 +161,32 @@ class AutomatedTask(BaseAuditModel):

         return self.last_run

+    # These fields will be duplicated on the agent tasks that are managed by a policy
+    @property
+    def policy_fields_to_copy(self) -> List[str]:
+        return [
+            "alert_severity",
+            "email_alert",
+            "text_alert",
+            "dashboard_alert",
+            "script",
+            "script_args",
+            "assigned_check",
+            "name",
+            "run_time_days",
+            "run_time_minute",
+            "run_time_bit_weekdays",
+            "run_time_date",
+            "task_type",
+            "win_task_name",
+            "timeout",
+            "enabled",
+            "remove_if_not_scheduled",
+            "run_asap_after_missed",
+            "custom_field",
+            "collector_all_output",
+        ]
+
     @staticmethod
     def generate_task_name():
         chars = string.ascii_letters
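Note: the property above is what lets the policy code copy task settings generically instead of listing each field by hand; a minimal sketch of that pattern (the helper name is illustrative, not part of the PR).

# Sketch only: generic copy driven by policy_fields_to_copy.
def copy_policy_fields(source_task, target_task):
    for field in source_task.policy_fields_to_copy:
        # create_policy_task in this diff skips "assigned_check" and resolves it separately
        if field != "assigned_check":
            setattr(target_task, field, getattr(source_task, field))
    target_task.save()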
@@ -159,223 +199,220 @@ class AutomatedTask(BaseAuditModel):

         return TaskSerializer(task).data

-    def create_policy_task(self, agent=None, policy=None):
-        from .tasks import create_win_task_schedule
+    def create_policy_task(self, agent=None, policy=None, assigned_check=None):

         # if policy is present, then this task is being copied to another policy
         # if agent is present, then this task is being created on an agent from a policy
         # exit if neither are set or if both are set
-        if not agent and not policy or agent and policy:
+        # also exit if assigned_check is set because this task will be created when the check is
+        if (
+            (not agent and not policy)
+            or (agent and policy)
+            or (self.assigned_check and not assigned_check)
+        ):
             return

-        assigned_check = None
-
-        # get correct assigned check to task if set
-        if agent and self.assigned_check:
-            # check if there is a matching check on the agent
-            if agent.agentchecks.filter(parent_check=self.assigned_check.pk).exists():
-                assigned_check = agent.agentchecks.filter(
-                    parent_check=self.assigned_check.pk
-                ).first()
-            # check was overriden by agent and we need to use that agents check
-            else:
-                if agent.agentchecks.filter(
-                    check_type=self.assigned_check.check_type, overriden_by_policy=True
-                ).exists():
-                    assigned_check = agent.agentchecks.filter(
-                        check_type=self.assigned_check.check_type,
-                        overriden_by_policy=True,
-                    ).first()
-        elif policy and self.assigned_check:
-            if policy.policychecks.filter(name=self.assigned_check.name).exists():
-                assigned_check = policy.policychecks.filter(
-                    name=self.assigned_check.name
-                ).first()
-            else:
-                assigned_check = policy.policychecks.filter(
-                    check_type=self.assigned_check.check_type
-                ).first()
-
         task = AutomatedTask.objects.create(
             agent=agent,
             policy=policy,
             managed_by_policy=bool(agent),
             parent_task=(self.pk if agent else None),
-            alert_severity=self.alert_severity,
-            email_alert=self.email_alert,
-            text_alert=self.text_alert,
-            dashboard_alert=self.dashboard_alert,
-            script=self.script,
-            script_args=self.script_args,
             assigned_check=assigned_check,
-            name=self.name,
-            run_time_days=self.run_time_days,
-            run_time_minute=self.run_time_minute,
-            run_time_bit_weekdays=self.run_time_bit_weekdays,
-            run_time_date=self.run_time_date,
-            task_type=self.task_type,
-            win_task_name=self.win_task_name,
-            timeout=self.timeout,
-            enabled=self.enabled,
-            remove_if_not_scheduled=self.remove_if_not_scheduled,
         )

-        create_win_task_schedule.delay(task.pk)
-
-    def handle_alert(self) -> None:
-        from alerts.models import Alert
-        from autotasks.tasks import (
-            handle_resolved_task_email_alert,
-            handle_resolved_task_sms_alert,
-            handle_task_email_alert,
-            handle_task_sms_alert,
-        )
-
-        self.status = "failing" if self.retcode != 0 else "passing"
-        self.save()
-
-        # return if agent is in maintenance mode
-        if self.agent.maintenance_mode:
-            return
-
-        # see if agent has an alert template and use that
-        alert_template = self.agent.get_alert_template()
-
-        # resolve alert if it exists
-        if self.status == "passing":
-            if Alert.objects.filter(assigned_task=self, resolved=False).exists():
-                alert = Alert.objects.get(assigned_task=self, resolved=False)
-                alert.resolve()
-
-                # check if resolved email should be send
-                if (
-                    not alert.resolved_email_sent
-                    and self.email_alert
-                    or alert_template
-                    and alert_template.task_email_on_resolved
-                ):
-                    handle_resolved_task_email_alert.delay(pk=alert.pk)
-
-                # check if resolved text should be sent
-                if (
-                    not alert.resolved_sms_sent
-                    and self.text_alert
-                    or alert_template
-                    and alert_template.task_text_on_resolved
-                ):
-                    handle_resolved_task_sms_alert.delay(pk=alert.pk)
-
-                # check if resolved script should be run
-                if (
-                    alert_template
-                    and alert_template.resolved_action
-                    and not alert.resolved_action_run
-                ):
-
-                    r = self.agent.run_script(
-                        scriptpk=alert_template.resolved_action.pk,
-                        args=alert_template.resolved_action_args,
-                        timeout=alert_template.resolved_action_timeout,
-                        wait=True,
-                        full=True,
-                        run_on_any=True,
-                    )
-
-                    # command was successful
-                    if type(r) == dict:
-                        alert.resolved_action_retcode = r["retcode"]
-                        alert.resolved_action_stdout = r["stdout"]
-                        alert.resolved_action_stderr = r["stderr"]
-                        alert.resolved_action_execution_time = "{:.4f}".format(
-                            r["execution_time"]
-                        )
-                        alert.resolved_action_run = djangotime.now()
-                        alert.save()
-                    else:
-                        logger.error(
-                            f"Resolved action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} resolved alert for task: {self.name}"
-                        )
-
-        # create alert if task is failing
-        else:
-            if not Alert.objects.filter(assigned_task=self, resolved=False).exists():
-                alert = Alert.create_task_alert(self)
-            else:
-                alert = Alert.objects.get(assigned_task=self, resolved=False)
-
-                # check if alert severity changed on task and update the alert
-                if self.alert_severity != alert.severity:
-                    alert.severity = self.alert_severity
-                    alert.save(update_fields=["severity"])
-
-            # create alert in dashboard if enabled
-            if (
-                self.dashboard_alert
-                or alert_template
-                and alert_template.task_always_alert
-            ):
-                alert.hidden = False
-                alert.save()
-
-            # send email if enabled
-            if (
-                not alert.email_sent
-                and self.email_alert
-                or alert_template
-                and self.alert_severity in alert_template.task_email_alert_severity
-                and alert_template.check_always_email
-            ):
-                handle_task_email_alert.delay(
-                    pk=alert.pk,
-                    alert_template=alert_template.check_periodic_alert_days
-                    if alert_template
-                    else None,
-                )
-
-            # send text if enabled
-            if (
-                not alert.sms_sent
-                and self.text_alert
-                or alert_template
-                and self.alert_severity in alert_template.task_text_alert_severity
-                and alert_template.check_always_text
-            ):
-                handle_task_sms_alert.delay(
-                    pk=alert.pk,
-                    alert_template=alert_template.check_periodic_alert_days
-                    if alert_template
-                    else None,
-                )
-
-            # check if any scripts should be run
-            if alert_template and alert_template.action and not alert.action_run:
-                r = self.agent.run_script(
-                    scriptpk=alert_template.action.pk,
-                    args=alert_template.action_args,
-                    timeout=alert_template.action_timeout,
-                    wait=True,
-                    full=True,
-                    run_on_any=True,
-                )
-
-                # command was successful
-                if type(r) == dict:
-                    alert.action_retcode = r["retcode"]
-                    alert.action_stdout = r["stdout"]
-                    alert.action_stderr = r["stderr"]
-                    alert.action_execution_time = "{:.4f}".format(r["execution_time"])
-                    alert.action_run = djangotime.now()
-                    alert.save()
-                else:
-                    logger.error(
-                        f"Failure action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} failure alert for task: {self.name}"
-                    )
+        for field in self.policy_fields_to_copy:
+            if field != "assigned_check":
+                setattr(task, field, getattr(self, field))
+
+        task.save()
+
+        if agent:
+            task.create_task_on_agent()
+
+    def create_task_on_agent(self):
+        from agents.models import Agent
+
+        agent = (
+            Agent.objects.filter(pk=self.agent.pk)
+            .only("pk", "version", "hostname", "agent_id")
+            .first()
+        )
+
+        if self.task_type == "scheduled":
+            nats_data = {
+                "func": "schedtask",
+                "schedtaskpayload": {
+                    "type": "rmm",
+                    "trigger": "weekly",
+                    "weekdays": self.run_time_bit_weekdays,
+                    "pk": self.pk,
+                    "name": self.win_task_name,
+                    "hour": dt.datetime.strptime(self.run_time_minute, "%H:%M").hour,
+                    "min": dt.datetime.strptime(self.run_time_minute, "%H:%M").minute,
+                },
+            }
+
+        elif self.task_type == "runonce":
+            # check if scheduled time is in the past
+            agent_tz = pytz.timezone(agent.timezone)
+            task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
+                pytz.utc
+            )
+            now = djangotime.now()
+            if task_time_utc < now:
+                self.run_time_date = now.astimezone(agent_tz).replace(
+                    tzinfo=pytz.utc
+                ) + djangotime.timedelta(minutes=5)
+                self.save(update_fields=["run_time_date"])
+
+            nats_data = {
+                "func": "schedtask",
+                "schedtaskpayload": {
+                    "type": "rmm",
+                    "trigger": "once",
+                    "pk": self.pk,
+                    "name": self.win_task_name,
+                    "year": int(dt.datetime.strftime(self.run_time_date, "%Y")),
+                    "month": dt.datetime.strftime(self.run_time_date, "%B"),
+                    "day": int(dt.datetime.strftime(self.run_time_date, "%d")),
+                    "hour": int(dt.datetime.strftime(self.run_time_date, "%H")),
+                    "min": int(dt.datetime.strftime(self.run_time_date, "%M")),
+                },
+            }
+
+            if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse(
+                "1.4.7"
+            ):
+                nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
+
+            if self.remove_if_not_scheduled:
+                nats_data["schedtaskpayload"]["deleteafter"] = True
+
+        elif self.task_type == "checkfailure" or self.task_type == "manual":
+            nats_data = {
+                "func": "schedtask",
+                "schedtaskpayload": {
+                    "type": "rmm",
+                    "trigger": "manual",
+                    "pk": self.pk,
+                    "name": self.win_task_name,
+                },
+            }
+        else:
+            return "error"
+
+        r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
+
+        if r != "ok":
+            self.sync_status = "initial"
+            self.save(update_fields=["sync_status"])
+            logger.warning(
+                f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in."
+            )
+            return "timeout"
+        else:
+            self.sync_status = "synced"
+            self.save(update_fields=["sync_status"])
+            logger.info(f"{agent.hostname} task {self.name} was successfully created")
+
+        return "ok"
+
+    def modify_task_on_agent(self):
+        from agents.models import Agent
+
+        agent = (
+            Agent.objects.filter(pk=self.agent.pk)
+            .only("pk", "version", "hostname", "agent_id")
+            .first()
+        )
+
+        nats_data = {
+            "func": "enableschedtask",
+            "schedtaskpayload": {
+                "name": self.win_task_name,
+                "enabled": self.enabled,
+            },
+        }
+        r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
+
+        if r != "ok":
+            self.sync_status = "notsynced"
+            self.save(update_fields=["sync_status"])
+            logger.warning(
+                f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin"
+            )
+            return "timeout"
+        else:
+            self.sync_status = "synced"
+            self.save(update_fields=["sync_status"])
+            logger.info(f"{agent.hostname} task {self.name} was successfully modified")
+
+        return "ok"
+
+    def delete_task_on_agent(self):
+        from agents.models import Agent
+
+        agent = (
+            Agent.objects.filter(pk=self.agent.pk)
+            .only("pk", "version", "hostname", "agent_id")
+            .first()
+        )
+
+        nats_data = {
+            "func": "delschedtask",
+            "schedtaskpayload": {"name": self.win_task_name},
+        }
+        r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
+
+        if r != "ok" and "The system cannot find the file specified" not in r:
+            self.sync_status = "pendingdeletion"
+
+            try:
+                self.save(update_fields=["sync_status"])
+            except DatabaseError:
+                pass
+
+            logger.warning(
+                f"{agent.hostname} task {self.name} will be deleted on next checkin"
+            )
+            return "timeout"
+        else:
+            self.delete()
+            logger.info(f"{agent.hostname} task {self.name} was deleted")
+
+        return "ok"
+
+    def run_win_task(self):
+        from agents.models import Agent
+
+        agent = (
+            Agent.objects.filter(pk=self.agent.pk)
+            .only("pk", "version", "hostname", "agent_id")
+            .first()
+        )
+
+        asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
+        return "ok"
+
+    def should_create_alert(self, alert_template=None):
+        return (
+            self.dashboard_alert
+            or self.email_alert
+            or self.text_alert
+            or (
+                alert_template
+                and (
+                    alert_template.task_always_alert
+                    or alert_template.task_always_email
+                    or alert_template.task_always_text
+                )
+            )
+        )

     def send_email(self):
         from core.models import CoreSettings

         CORE = CoreSettings.objects.first()
-        alert_template = self.agent.get_alert_template()

         if self.agent:
             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
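Note: the runonce branch of create_task_on_agent above nudges a scheduled time that is already in the past forward by five minutes. The same logic, pulled out as a standalone sketch:

# Sketch only: mirrors the run-once adjustment in create_task_on_agent.
import pytz
from django.utils import timezone as djangotime

def bump_past_runonce(run_time_date, agent_timezone):
    agent_tz = pytz.timezone(agent_timezone)
    task_time_utc = run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
    if task_time_utc < djangotime.now():
        # reschedule five minutes from now so the agent still runs the task once
        return djangotime.now().astimezone(agent_tz).replace(
            tzinfo=pytz.utc
        ) + djangotime.timedelta(minutes=5)
    return run_time_date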
@@ -387,14 +424,13 @@ class AutomatedTask(BaseAuditModel):
             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
         )

-        CORE.send_mail(subject, body, alert_template)
+        CORE.send_mail(subject, body, self.agent.alert_template)

     def send_sms(self):

         from core.models import CoreSettings

         CORE = CoreSettings.objects.first()
-        alert_template = self.agent.get_alert_template()

         if self.agent:
             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
@@ -406,13 +442,11 @@ class AutomatedTask(BaseAuditModel):
             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
         )

-        CORE.send_sms(body, alert_template=alert_template)
+        CORE.send_sms(body, alert_template=self.agent.alert_template)

     def send_resolved_email(self):
         from core.models import CoreSettings

-        alert_template = self.agent.get_alert_template()
-
         CORE = CoreSettings.objects.first()
         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
         body = (
@@ -420,16 +454,15 @@ class AutomatedTask(BaseAuditModel):
             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
         )

-        CORE.send_mail(subject, body, alert_template=alert_template)
+        CORE.send_mail(subject, body, alert_template=self.agent.alert_template)

     def send_resolved_sms(self):
         from core.models import CoreSettings

-        alert_template = self.agent.get_alert_template()
         CORE = CoreSettings.objects.first()
         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
         body = (
             subject
             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
         )
-        CORE.send_sms(body, alert_template=alert_template)
+        CORE.send_sms(body, alert_template=self.agent.alert_template)
api/tacticalrmm/autotasks/permissions.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+from rest_framework import permissions
+
+from tacticalrmm.permissions import _has_perm
+
+
+class ManageAutoTaskPerms(permissions.BasePermission):
+    def has_permission(self, r, view):
+        if r.method == "GET":
+            return True
+
+        return _has_perm(r, "can_manage_autotasks")
+
+
+class RunAutoTaskPerms(permissions.BasePermission):
+    def has_permission(self, r, view):
+        return _has_perm(r, "can_run_autotasks")
@@ -18,7 +18,7 @@ class TaskSerializer(serializers.ModelSerializer):
     def get_alert_template(self, obj):

         if obj.agent:
-            alert_template = obj.agent.get_alert_template()
+            alert_template = obj.agent.alert_template
         else:
             alert_template = None

@@ -68,6 +68,12 @@ class TaskRunnerGetSerializer(serializers.ModelSerializer):

 class TaskGOGetSerializer(serializers.ModelSerializer):
     script = ScriptCheckSerializer(read_only=True)
+    script_args = serializers.SerializerMethodField()
+
+    def get_script_args(self, obj):
+        return Script.parse_script_args(
+            agent=obj.agent, shell=obj.script.shell, args=obj.script_args
+        )

     class Meta:
         model = AutomatedTask
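Note: get_script_args above resolves the stored arguments per agent at serialization time via Script.parse_script_args; an illustrative call follows, where the placeholder syntax in raw_args is an assumption rather than something shown in this diff.

# Illustrative only; the placeholder syntax in raw_args is assumed.
raw_args = ["-hostname", "{{agent.hostname}}"]
resolved = Script.parse_script_args(agent=agent, shell=script.shell, args=raw_args)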
@@ -4,204 +4,46 @@ import random
 from time import sleep
 from typing import Union

-import pytz
 from django.conf import settings
 from django.utils import timezone as djangotime
 from loguru import logger
-from packaging import version as pyver

-from logs.models import PendingAction
+from autotasks.models import AutomatedTask
 from tacticalrmm.celery import app

-from .models import AutomatedTask
-
 logger.configure(**settings.LOG_CONFIG)


 @app.task
-def create_win_task_schedule(pk, pending_action=False):
+def create_win_task_schedule(pk):
     task = AutomatedTask.objects.get(pk=pk)

-    if task.task_type == "scheduled":
-        nats_data = {
-            "func": "schedtask",
-            "schedtaskpayload": {
-                "type": "rmm",
-                "trigger": "weekly",
-                "weekdays": task.run_time_bit_weekdays,
-                "pk": task.pk,
-                "name": task.win_task_name,
-                "hour": dt.datetime.strptime(task.run_time_minute, "%H:%M").hour,
-                "min": dt.datetime.strptime(task.run_time_minute, "%H:%M").minute,
-            },
-        }
-
-    elif task.task_type == "runonce":
-        # check if scheduled time is in the past
-        agent_tz = pytz.timezone(task.agent.timezone)
-        task_time_utc = task.run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
-        now = djangotime.now()
-        if task_time_utc < now:
-            task.run_time_date = now.astimezone(agent_tz).replace(
-                tzinfo=pytz.utc
-            ) + djangotime.timedelta(minutes=5)
-            task.save()
-
-        nats_data = {
-            "func": "schedtask",
-            "schedtaskpayload": {
-                "type": "rmm",
-                "trigger": "once",
-                "pk": task.pk,
-                "name": task.win_task_name,
-                "year": int(dt.datetime.strftime(task.run_time_date, "%Y")),
-                "month": dt.datetime.strftime(task.run_time_date, "%B"),
-                "day": int(dt.datetime.strftime(task.run_time_date, "%d")),
-                "hour": int(dt.datetime.strftime(task.run_time_date, "%H")),
-                "min": int(dt.datetime.strftime(task.run_time_date, "%M")),
-            },
-        }
-
-        if task.remove_if_not_scheduled and pyver.parse(
-            task.agent.version
-        ) >= pyver.parse("1.1.2"):
-            nats_data["schedtaskpayload"]["deleteafter"] = True
-
-    elif task.task_type == "checkfailure" or task.task_type == "manual":
-        nats_data = {
-            "func": "schedtask",
-            "schedtaskpayload": {
-                "type": "rmm",
-                "trigger": "manual",
-                "pk": task.pk,
-                "name": task.win_task_name,
-            },
-        }
-    else:
-        return "error"
-
-    r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
-
-    if r != "ok":
-        # don't create pending action if this task was initiated by a pending action
-        if not pending_action:
-
-            # complete any other pending actions on agent with same task_id
-            task.agent.remove_matching_pending_task_actions(task.id)
-
-            PendingAction(
-                agent=task.agent,
-                action_type="taskaction",
-                details={"action": "taskcreate", "task_id": task.id},
-            ).save()
-            task.sync_status = "notsynced"
-            task.save(update_fields=["sync_status"])
-
-        logger.error(
-            f"Unable to create scheduled task {task.win_task_name} on {task.agent.hostname}. It will be created when the agent checks in."
-        )
-        return
-
-    # clear pending action since it was successful
-    if pending_action:
-        pendingaction = PendingAction.objects.get(pk=pending_action)
-        pendingaction.status = "completed"
-        pendingaction.save(update_fields=["status"])
-
-    task.sync_status = "synced"
-    task.save(update_fields=["sync_status"])
-
-    logger.info(f"{task.agent.hostname} task {task.name} was successfully created")
+    task.create_task_on_agent()

     return "ok"


 @app.task
-def enable_or_disable_win_task(pk, action, pending_action=False):
+def enable_or_disable_win_task(pk):
     task = AutomatedTask.objects.get(pk=pk)

-    nats_data = {
-        "func": "enableschedtask",
-        "schedtaskpayload": {
-            "name": task.win_task_name,
-            "enabled": action,
-        },
-    }
-    r = asyncio.run(task.agent.nats_cmd(nats_data))
-
-    if r != "ok":
-        # don't create pending action if this task was initiated by a pending action
-        if not pending_action:
-            PendingAction(
-                agent=task.agent,
-                action_type="taskaction",
-                details={
-                    "action": "tasktoggle",
-                    "value": action,
-                    "task_id": task.id,
-                },
-            ).save()
-            task.sync_status = "notsynced"
-            task.save(update_fields=["sync_status"])
-
-        return
-
-    # clear pending action since it was successful
-    if pending_action:
-        pendingaction = PendingAction.objects.get(pk=pending_action)
-        pendingaction.status = "completed"
-        pendingaction.save(update_fields=["status"])
-
-    task.sync_status = "synced"
-    task.save(update_fields=["sync_status"])
+    task.modify_task_on_agent()

     return "ok"


 @app.task
-def delete_win_task_schedule(pk, pending_action=False):
+def delete_win_task_schedule(pk):
     task = AutomatedTask.objects.get(pk=pk)

-    nats_data = {
-        "func": "delschedtask",
-        "schedtaskpayload": {"name": task.win_task_name},
-    }
-    r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
-
-    if r != "ok" and "The system cannot find the file specified" not in r:
-        # don't create pending action if this task was initiated by a pending action
-        if not pending_action:
-
-            # complete any other pending actions on agent with same task_id
-            task.agent.remove_matching_pending_task_actions(task.id)
-
-            PendingAction(
-                agent=task.agent,
-                action_type="taskaction",
-                details={"action": "taskdelete", "task_id": task.id},
-            ).save()
-            task.sync_status = "pendingdeletion"
-            task.save(update_fields=["sync_status"])
-
-        return "timeout"
-
-    # complete pending action since it was successful
-    if pending_action:
-        pendingaction = PendingAction.objects.get(pk=pending_action)
-        pendingaction.status = "completed"
-        pendingaction.save(update_fields=["status"])
-
-    # complete any other pending actions on agent with same task_id
+    task.delete_task_on_agent()
|
|
||||||
task.agent.remove_matching_pending_task_actions(task.id)
|
|
||||||
|
|
||||||
task.delete()
|
|
||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def run_win_task(pk):
|
def run_win_task(pk):
|
||||||
task = AutomatedTask.objects.get(pk=pk)
|
task = AutomatedTask.objects.get(pk=pk)
|
||||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
task.run_win_task()
|
||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
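The refactor above turns each Celery task into a thin wrapper around an AutomatedTask model method (create_task_on_agent, modify_task_on_agent, delete_task_on_agent, run_win_task) and drops the pending_action argument from the task signatures. When a Celery task loses an argument like this, messages already sitting in the broker can still carry the old kwargs; a hypothetical mitigation (not part of this changeset) is to accept and ignore the stale kwarg for one release:

```python
# Hypothetical compatibility shim, not part of this diff: accept the old
# pending_action kwarg so messages queued before the deploy don't raise
# TypeError, while all real work stays in the model method.
from autotasks.models import AutomatedTask
from tacticalrmm.celery import app


@app.task
def create_win_task_schedule(pk, pending_action=None, **kwargs):
    # pending_action is ignored; PendingAction bookkeeping no longer lives here
    AutomatedTask.objects.get(pk=pk).create_task_on_agent()
    return "ok"
```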
@@ -4,7 +4,6 @@ from unittest.mock import call, patch
 from django.utils import timezone as djangotime
 from model_bakery import baker

-from logs.models import PendingAction
 from tacticalrmm.test import TacticalTestCase

 from .models import AutomatedTask
@@ -17,10 +16,10 @@ class TestAutotaskViews(TacticalTestCase):
         self.authenticate()
         self.setup_coresettings()

-    @patch("automation.tasks.generate_agent_tasks_from_policies_task.delay")
+    @patch("automation.tasks.generate_agent_autotasks_task.delay")
     @patch("autotasks.tasks.create_win_task_schedule.delay")
     def test_add_autotask(
-        self, create_win_task_schedule, generate_agent_tasks_from_policies_task
+        self, create_win_task_schedule, generate_agent_autotasks_task
     ):
         url = "/tasks/automatedtasks/"

@@ -29,7 +28,6 @@ class TestAutotaskViews(TacticalTestCase):
         agent = baker.make_recipe("agents.agent")
         policy = baker.make("automation.Policy")
         check = baker.make_recipe("checks.diskspace_check", agent=agent)
-        old_agent = baker.make_recipe("agents.agent", version="1.1.0")

         # test script set to invalid pk
         data = {"autotask": {"script": 500}}
@@ -52,15 +50,6 @@ class TestAutotaskViews(TacticalTestCase):
         resp = self.client.post(url, data, format="json")
         self.assertEqual(resp.status_code, 404)

-        # test old agent version
-        data = {
-            "autotask": {"script": script.id},
-            "agent": old_agent.id,
-        }
-
-        resp = self.client.post(url, data, format="json")
-        self.assertEqual(resp.status_code, 400)
-
         # test add task to agent
         data = {
             "autotask": {
@@ -94,13 +83,13 @@ class TestAutotaskViews(TacticalTestCase):
                 "task_type": "manual",
                 "assigned_check": None,
             },
-            "policy": policy.id,
+            "policy": policy.id,  # type: ignore
         }

         resp = self.client.post(url, data, format="json")
         self.assertEqual(resp.status_code, 200)

-        generate_agent_tasks_from_policies_task.assert_called_with(policy.id)
+        generate_agent_autotasks_task.assert_called_with(policy=policy.id)  # type: ignore

         self.check_not_authenticated("post", url)

@@ -116,14 +105,14 @@ class TestAutotaskViews(TacticalTestCase):
         serializer = AutoTaskSerializer(agent)

         self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.data, serializer.data)
+        self.assertEqual(resp.data, serializer.data)  # type: ignore

         self.check_not_authenticated("get", url)

     @patch("autotasks.tasks.enable_or_disable_win_task.delay")
-    @patch("automation.tasks.update_policy_task_fields_task.delay")
+    @patch("automation.tasks.update_policy_autotasks_fields_task.delay")
     def test_update_autotask(
-        self, update_policy_task_fields_task, enable_or_disable_win_task
+        self, update_policy_autotasks_fields_task, enable_or_disable_win_task
     ):
         # setup data
         agent = baker.make_recipe("agents.agent")
@@ -135,32 +124,32 @@ class TestAutotaskViews(TacticalTestCase):
         resp = self.client.patch("/tasks/500/automatedtasks/", format="json")
         self.assertEqual(resp.status_code, 404)

-        url = f"/tasks/{agent_task.id}/automatedtasks/"
+        url = f"/tasks/{agent_task.id}/automatedtasks/"  # type: ignore

         # test editing agent task
         data = {"enableordisable": False}

         resp = self.client.patch(url, data, format="json")
         self.assertEqual(resp.status_code, 200)
-        enable_or_disable_win_task.assert_called_with(pk=agent_task.id, action=False)
+        enable_or_disable_win_task.assert_called_with(pk=agent_task.id)  # type: ignore

-        url = f"/tasks/{policy_task.id}/automatedtasks/"
+        url = f"/tasks/{policy_task.id}/automatedtasks/"  # type: ignore

         # test editing policy task
         data = {"enableordisable": True}

         resp = self.client.patch(url, data, format="json")
         self.assertEqual(resp.status_code, 200)
-        update_policy_task_fields_task.assert_called_with(
-            policy_task.id, update_agent=True
+        update_policy_autotasks_fields_task.assert_called_with(
+            task=policy_task.id, update_agent=True  # type: ignore
         )

         self.check_not_authenticated("patch", url)

     @patch("autotasks.tasks.delete_win_task_schedule.delay")
-    @patch("automation.tasks.delete_policy_autotask_task.delay")
+    @patch("automation.tasks.delete_policy_autotasks_task.delay")
     def test_delete_autotask(
-        self, delete_policy_autotask_task, delete_win_task_schedule
+        self, delete_policy_autotasks_task, delete_win_task_schedule
     ):
         # setup data
         agent = baker.make_recipe("agents.agent")
@@ -173,21 +162,22 @@ class TestAutotaskViews(TacticalTestCase):
         self.assertEqual(resp.status_code, 404)

         # test delete agent task
-        url = f"/tasks/{agent_task.id}/automatedtasks/"
+        url = f"/tasks/{agent_task.id}/automatedtasks/"  # type: ignore
         resp = self.client.delete(url, format="json")
         self.assertEqual(resp.status_code, 200)
-        delete_win_task_schedule.assert_called_with(pk=agent_task.id)
+        delete_win_task_schedule.assert_called_with(pk=agent_task.id)  # type: ignore

         # test delete policy task
-        url = f"/tasks/{policy_task.id}/automatedtasks/"
+        url = f"/tasks/{policy_task.id}/automatedtasks/"  # type: ignore
         resp = self.client.delete(url, format="json")
         self.assertEqual(resp.status_code, 200)
-        delete_policy_autotask_task.assert_called_with(policy_task.id)
+        self.assertFalse(AutomatedTask.objects.filter(pk=policy_task.id))  # type: ignore
+        delete_policy_autotasks_task.assert_called_with(task=policy_task.id)  # type: ignore

         self.check_not_authenticated("delete", url)

-    @patch("agents.models.Agent.nats_cmd")
-    def test_run_autotask(self, nats_cmd):
+    @patch("autotasks.tasks.run_win_task.delay")
+    def test_run_autotask(self, run_win_task):
         # setup data
         agent = baker.make_recipe("agents.agent", version="1.1.0")
         task = baker.make("autotasks.AutomatedTask", agent=agent)
@@ -197,18 +187,10 @@ class TestAutotaskViews(TacticalTestCase):
         self.assertEqual(resp.status_code, 404)

         # test run agent task
-        url = f"/tasks/runwintask/{task.id}/"
+        url = f"/tasks/runwintask/{task.id}/"  # type: ignore
         resp = self.client.get(url, format="json")
         self.assertEqual(resp.status_code, 200)
-        nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
-        nats_cmd.reset_mock()
-
-        old_agent = baker.make_recipe("agents.agent", version="1.0.2")
-        task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
-        url = f"/tasks/runwintask/{task2.id}/"
-        resp = self.client.get(url, format="json")
-        self.assertEqual(resp.status_code, 400)
-        nats_cmd.assert_not_called()
+        run_win_task.assert_called()

         self.check_not_authenticated("get", url)

@@ -301,9 +283,9 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
             run_time_bit_weekdays=127,
             run_time_minute="21:55",
         )
-        self.assertEqual(self.task1.sync_status, "notsynced")
+        self.assertEqual(self.task1.sync_status, "initial")
         nats_cmd.return_value = "ok"
-        ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
+        ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
         self.assertEqual(nats_cmd.call_count, 1)
         nats_cmd.assert_called_with(
             {
@@ -318,29 +300,16 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
                     "min": 55,
                 },
             },
-            timeout=10,
+            timeout=5,
         )
         self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
         self.assertEqual(self.task1.sync_status, "synced")

         nats_cmd.return_value = "timeout"
-        ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
+        ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
         self.assertEqual(ret.status, "SUCCESS")
         self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
-        self.assertEqual(self.task1.sync_status, "notsynced")
-
-        # test pending action
-        self.pending_action = PendingAction.objects.create(
-            agent=self.agent, action_type="taskaction"
-        )
-        self.assertEqual(self.pending_action.status, "pending")
-        nats_cmd.return_value = "ok"
-        ret = create_win_task_schedule.s(
-            pk=self.task1.pk, pending_action=self.pending_action.pk
-        ).apply()
-        self.assertEqual(ret.status, "SUCCESS")
-        self.pending_action = PendingAction.objects.get(pk=self.pending_action.pk)
-        self.assertEqual(self.pending_action.status, "completed")
+        self.assertEqual(self.task1.sync_status, "initial")

         # test runonce with future date
         nats_cmd.reset_mock()
@@ -354,7 +323,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
         nats_cmd.return_value = "ok"
-        ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
+        ret = create_win_task_schedule.s(pk=self.task2.pk).apply()
@@ -370,7 +339,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
-            timeout=10,
+            timeout=5,
@@ -386,7 +355,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
-        ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
+        ret = create_win_task_schedule.s(pk=self.task3.pk).apply()
@@ -402,7 +371,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
-        ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
+        ret = create_win_task_schedule.s(pk=self.task4.pk).apply()
@@ -413,7 +382,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
-            timeout=10,
+            timeout=5,
@@ -427,7 +396,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
-        ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
+        ret = create_win_task_schedule.s(pk=self.task5.pk).apply()
@@ -438,6 +407,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
-            timeout=10,
+            timeout=5,
         )
         self.assertEqual(ret.status, "SUCCESS")
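These tests drive the Celery tasks synchronously by building a signature with .s() and running it with .apply(), while @patch replaces Agent.nats_cmd so no live agent is contacted. A stripped-down illustration of that pattern; names mirror the tests above, but the snippet is illustrative rather than copied from the repository:

```python
# Illustrative test helper: run a Celery task eagerly and fake the NATS reply.
from unittest.mock import patch

from autotasks.tasks import create_win_task_schedule


def run_create_task_eagerly(task_pk: int, reply: str = "ok"):
    with patch("agents.models.Agent.nats_cmd") as nats_cmd:
        nats_cmd.return_value = reply  # what the agent "answers"
        result = create_win_task_schedule.s(pk=task_pk).apply()  # runs in-process
    return result.status, nats_cmd.call_count
```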
@@ -1,7 +1,6 @@
-import asyncio
-
 from django.shortcuts import get_object_or_404
-from rest_framework.decorators import api_view
+from rest_framework.decorators import api_view, permission_classes
+from rest_framework.permissions import IsAuthenticated
 from rest_framework.response import Response
 from rest_framework.views import APIView

@@ -11,18 +10,17 @@ from scripts.models import Script
 from tacticalrmm.utils import get_bit_days, get_default_timezone, notify_error

 from .models import AutomatedTask
+from .permissions import ManageAutoTaskPerms, RunAutoTaskPerms
 from .serializers import AutoTaskSerializer, TaskSerializer
-from .tasks import (
-    create_win_task_schedule,
-    delete_win_task_schedule,
-    enable_or_disable_win_task,
-)


 class AddAutoTask(APIView):
+    permission_classes = [IsAuthenticated, ManageAutoTaskPerms]
+
     def post(self, request):
         from automation.models import Policy
-        from automation.tasks import generate_agent_tasks_from_policies_task
+        from automation.tasks import generate_agent_autotasks_task
+        from autotasks.tasks import create_win_task_schedule

         data = request.data
         script = get_object_or_404(Script, pk=data["autotask"]["script"])
@@ -34,9 +32,6 @@ class AddAutoTask(APIView):
             parent = {"policy": policy}
         else:
             agent = get_object_or_404(Agent, pk=data["agent"])
-            if not agent.has_gotasks:
-                return notify_error("Requires agent version 1.1.1 or greater")
-
             parent = {"agent": agent}

         check = None
@@ -50,7 +45,7 @@ class AddAutoTask(APIView):
         del data["autotask"]["run_time_days"]
         serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
         serializer.is_valid(raise_exception=True)
-        obj = serializer.save(
+        task = serializer.save(
             **parent,
             script=script,
             win_task_name=AutomatedTask.generate_task_name(),
@@ -58,16 +53,18 @@ class AddAutoTask(APIView):
             run_time_bit_weekdays=bit_weekdays,
         )

-        if not "policy" in data:
-            create_win_task_schedule.delay(pk=obj.pk)
+        if task.agent:
+            create_win_task_schedule.delay(pk=task.pk)

-        if "policy" in data:
-            generate_agent_tasks_from_policies_task.delay(data["policy"])
+        elif task.policy:
+            generate_agent_autotasks_task.delay(policy=task.policy.pk)

         return Response("Task will be created shortly!")


 class AutoTask(APIView):
+    permission_classes = [IsAuthenticated, ManageAutoTaskPerms]
+
     def get(self, request, pk):

         agent = get_object_or_404(Agent, pk=pk)
@@ -78,7 +75,7 @@ class AutoTask(APIView):
         return Response(AutoTaskSerializer(agent, context=ctx).data)

     def put(self, request, pk):
-        from automation.tasks import update_policy_task_fields_task
+        from automation.tasks import update_policy_autotasks_fields_task

         task = get_object_or_404(AutomatedTask, pk=pk)

@@ -87,49 +84,54 @@ class AutoTask(APIView):
         serializer.save()

         if task.policy:
-            update_policy_task_fields_task.delay(task.pk)
+            update_policy_autotasks_fields_task.delay(task=task.pk)

         return Response("ok")

     def patch(self, request, pk):
-        from automation.tasks import update_policy_task_fields_task
+        from automation.tasks import update_policy_autotasks_fields_task
+        from autotasks.tasks import enable_or_disable_win_task

         task = get_object_or_404(AutomatedTask, pk=pk)

         if "enableordisable" in request.data:
             action = request.data["enableordisable"]
-
-            if not task.policy:
-                enable_or_disable_win_task.delay(pk=task.pk, action=action)
-
-            else:
-                update_policy_task_fields_task.delay(task.pk, update_agent=True)
-
             task.enabled = action
             task.save(update_fields=["enabled"])
             action = "enabled" if action else "disabled"

+            if task.policy:
+                update_policy_autotasks_fields_task.delay(
+                    task=task.pk, update_agent=True
+                )
+            elif task.agent:
+                enable_or_disable_win_task.delay(pk=task.pk)
+
             return Response(f"Task will be {action} shortly")

+        else:
+            return notify_error("The request was invalid")
+
     def delete(self, request, pk):
-        from automation.tasks import delete_policy_autotask_task
+        from automation.tasks import delete_policy_autotasks_task
+        from autotasks.tasks import delete_win_task_schedule

         task = get_object_or_404(AutomatedTask, pk=pk)

-        if not task.policy:
+        if task.agent:
             delete_win_task_schedule.delay(pk=task.pk)
-
-        if task.policy:
-            delete_policy_autotask_task.delay(task.pk)
+        elif task.policy:
+            delete_policy_autotasks_task.delay(task=task.pk)

         task.delete()

         return Response(f"{task.name} will be deleted shortly")


 @api_view()
+@permission_classes([IsAuthenticated, RunAutoTaskPerms])
 def run_task(request, pk):
-    task = get_object_or_404(AutomatedTask, pk=pk)
-    if not task.agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")
+    from autotasks.tasks import run_win_task

-    asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
+    task = get_object_or_404(AutomatedTask, pk=pk)
+    run_win_task.delay(pk=pk)
     return Response(f"{task.name} will now be run on {task.agent.hostname}")
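The views now attach DRF permission classes: ManageAutoTaskPerms on the class-based views and RunAutoTaskPerms via @permission_classes on run_task. Those classes come from autotasks/permissions.py, which is not included in this excerpt; the sketch below only shows the generic BasePermission shape such a class takes, and the role attribute it consults is made up for illustration:

```python
# Generic sketch of a DRF permission class; the real ManageAutoTaskPerms may
# check a different role field. "can_manage_autotasks" is a placeholder name.
from rest_framework import permissions


class ManageAutoTaskPerms(permissions.BasePermission):
    def has_permission(self, request, view) -> bool:
        # read-only requests pass through; writes require an explicit role flag
        if request.method in permissions.SAFE_METHODS:
            return True
        return getattr(request.user, "can_manage_autotasks", False)
```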
@@ -3,7 +3,7 @@ from model_bakery.recipe import Recipe
 check = Recipe("checks.Check")

 diskspace_check = check.extend(
-    check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=75
+    check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=10
 )

 cpuload_check = check.extend(
@@ -13,7 +13,7 @@ cpuload_check = check.extend(
 ping_check = check.extend(check_type="ping", ip="10.10.10.10")

 memory_check = check.extend(
-    check_type="memory", warning_threshold=30, error_threshold=75
+    check_type="memory", warning_threshold=60, error_threshold=75
 )

 winsvc_check = check.extend(
@@ -21,6 +21,7 @@ winsvc_check = check.extend(
     svc_name="ServiceName",
     svc_display_name="ServiceName",
     svc_policy_mode="manual",
+    pass_if_svc_not_exist=False,
 )

 eventlog_check = check.extend(
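These model_bakery recipes are what the test files in this diff pull in through baker.make_recipe; any keyword passed at call time overrides the recipe default, so the threshold changes above only move the baseline values. A short usage example:

```python
# Consuming the recipes above in a test; call-time overrides win over the
# recipe defaults.
from model_bakery import baker

agent = baker.make_recipe("agents.agent")
check = baker.make_recipe(
    "checks.diskspace_check",
    agent=agent,
    error_threshold=20,  # per-test override of the recipe's default
)
```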
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-03-06 02:18
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('checks', '0021_auto_20210212_1429'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='check',
+            name='number_of_events_b4_alert',
+            field=models.PositiveIntegerField(blank=True, default=1, null=True),
+        ),
+    ]

api/tacticalrmm/checks/migrations/0023_check_run_interval.py (new file, 18 lines)
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.7 on 2021-03-06 02:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('checks', '0022_check_number_of_events_b4_alert'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='check',
+            name='run_interval',
+            field=models.PositiveIntegerField(blank=True, default=0),
+        ),
+    ]

api/tacticalrmm/checks/migrations/0024_auto_20210606_1632.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+# Generated by Django 3.2.1 on 2021-06-06 16:32
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('checks', '0023_check_run_interval'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='checkhistory',
+            name='check_history',
+        ),
+        migrations.AddField(
+            model_name='checkhistory',
+            name='check_id',
+            field=models.PositiveIntegerField(default=0),
+        ),
+    ]
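The first hunk above appears to be the checks 0022 migration (0023 declares it as its dependency, 0022_check_number_of_events_b4_alert). Migration 0024 drops the check_history ForeignKey on CheckHistory and replaces it with a bare check_id integer, so the link is no longer enforced by the database and there is no ORM relation to traverse. A small sketch of what lookups look like after the change, assuming the usual checks app import path:

```python
# After 0024: filter history rows by the stored integer id instead of a FK.
from checks.models import Check, CheckHistory

check = Check.objects.first()
history_qs = CheckHistory.objects.filter(check_id=check.pk)
# Deleting the Check no longer cascades to these rows; any cleanup is explicit.
```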
@@ -1,30 +1,19 @@
-import asyncio
 import json
 import os
 import string
 from statistics import mean
-from typing import Any, List, Union
+from typing import Any

 import pytz
+from alerts.models import SEVERITY_CHOICES
+from core.models import CoreSettings
 from django.conf import settings
 from django.contrib.postgres.fields import ArrayField
 from django.core.validators import MaxValueValidator, MinValueValidator
 from django.db import models
-from django.utils import timezone as djangotime
-from loguru import logger
-from rest_framework.fields import JSONField
-
-from alerts.models import SEVERITY_CHOICES
-from core.models import CoreSettings
 from logs.models import BaseAuditModel
+from loguru import logger

-from .tasks import (
-    handle_check_email_alert_task,
-    handle_check_sms_alert_task,
-    handle_resolved_check_email_alert_task,
-    handle_resolved_check_sms_alert_task,
-)
-from .utils import bytes2human
-
 logger.configure(**settings.LOG_CONFIG)
@@ -101,6 +90,7 @@ class Check(BaseAuditModel):
     fail_count = models.PositiveIntegerField(default=0)
     outage_history = models.JSONField(null=True, blank=True)  # store
     extra_details = models.JSONField(null=True, blank=True)
+    run_interval = models.PositiveIntegerField(blank=True, default=0)
     # check specific fields

     # for eventlog, script, ip, and service alert severity
@@ -189,6 +179,9 @@ class Check(BaseAuditModel):
         max_length=255, choices=EVT_LOG_FAIL_WHEN_CHOICES, null=True, blank=True
     )
     search_last_days = models.PositiveIntegerField(null=True, blank=True)
+    number_of_events_b4_alert = models.PositiveIntegerField(
+        null=True, blank=True, default=1
+    )

     def __str__(self):
         if self.agent:
@@ -206,9 +199,9 @@ class Check(BaseAuditModel):
             if self.error_threshold:
                 text += f" Error Threshold: {self.error_threshold}%"

-            return f"{self.get_check_type_display()}: Drive {self.disk} < {text}"
+            return f"{self.get_check_type_display()}: Drive {self.disk} - {text}"  # type: ignore
         elif self.check_type == "ping":
-            return f"{self.get_check_type_display()}: {self.name}"
+            return f"{self.get_check_type_display()}: {self.name}"  # type: ignore
         elif self.check_type == "cpuload" or self.check_type == "memory":

             text = ""
@@ -217,13 +210,13 @@ class Check(BaseAuditModel):
             if self.error_threshold:
                 text += f" Error Threshold: {self.error_threshold}%"

-            return f"{self.get_check_type_display()} > {text}"
+            return f"{self.get_check_type_display()} - {text}"  # type: ignore
         elif self.check_type == "winsvc":
-            return f"{self.get_check_type_display()}: {self.svc_display_name}"
+            return f"{self.get_check_type_display()}: {self.svc_display_name}"  # type: ignore
         elif self.check_type == "eventlog":
-            return f"{self.get_check_type_display()}: {self.name}"
+            return f"{self.get_check_type_display()}: {self.name}"  # type: ignore
         elif self.check_type == "script":
-            return f"{self.get_check_type_display()}: {self.script.name}"
+            return f"{self.get_check_type_display()}: {self.script.name}"  # type: ignore
         else:
             return "n/a"
@@ -242,7 +235,7 @@ class Check(BaseAuditModel):
         return self.last_run

     @property
-    def non_editable_fields(self) -> List[str]:
+    def non_editable_fields(self) -> list[str]:
         return [
             "check_type",
             "status",
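The two new model fields above feed into the check runner: run_interval stores a per-check interval, and number_of_events_b4_alert sets how many matching event-log entries must be present before the status flips (the comparison appears in handle_check() further down). A plain-Python restatement of that event-log threshold decision, for clarity only:

```python
# Restatement of the event-log decision in handle_check(); this is not the
# model code itself, just the same comparison in isolation.
def eventlog_status(matching_events: list, fail_when: str, threshold: int) -> str:
    enough = bool(matching_events) and len(matching_events) >= threshold
    if fail_when == "contains":
        return "failing" if enough else "passing"
    # "not_contains": seeing the expected events is the healthy case
    return "passing" if enough else "failing"
```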
@@ -267,147 +260,63 @@ class Check(BaseAuditModel):
             "modified_time",
         ]

-    def handle_alert(self) -> None:
-        from alerts.models import Alert, AlertTemplate
-
-        # return if agent is in maintenance mode
-        if self.agent.maintenance_mode:
-            return
-
-        # see if agent has an alert template and use that
-        alert_template: Union[AlertTemplate, None] = self.agent.get_alert_template()
-
-        # resolve alert if it exists
-        if self.status == "passing":
-            if Alert.objects.filter(assigned_check=self, resolved=False).exists():
-                alert = Alert.objects.get(assigned_check=self, resolved=False)
-                alert.resolve()
-                # ... (resolved email/text notifications via
-                # handle_resolved_check_email_alert_task / handle_resolved_check_sms_alert_task
-                # and the alert template's resolved_action script were dispatched here)
-
-        elif self.fail_count >= self.fails_b4_alert:
-            if not Alert.objects.filter(assigned_check=self, resolved=False).exists():
-                alert = Alert.create_check_alert(self)
-            else:
-                alert = Alert.objects.get(assigned_check=self, resolved=False)
-
-            # check if alert severity changed on check and update the alert
-            if self.alert_severity != alert.severity:
-                alert.severity = self.alert_severity
-                alert.save(update_fields=["severity"])
-            # ... (dashboard alert visibility, handle_check_email_alert_task /
-            # handle_check_sms_alert_task dispatch and the alert template's failure
-            # action script were handled here)
+    @property
+    def policy_fields_to_copy(self) -> list[str]:
+        return [
+            "warning_threshold",
+            "error_threshold",
+            "alert_severity",
+            "name",
+            "run_interval",
+            "disk",
+            "fails_b4_alert",
+            "ip",
+            "script",
+            "script_args",
+            "info_return_codes",
+            "warning_return_codes",
+            "timeout",
+            "svc_name",
+            "svc_display_name",
+            "svc_policy_mode",
+            "pass_if_start_pending",
+            "pass_if_svc_not_exist",
+            "restart_if_stopped",
+            "log_name",
+            "event_id",
+            "event_id_is_wildcard",
+            "event_type",
+            "event_source",
+            "event_message",
+            "fail_when",
+            "search_last_days",
+            "number_of_events_b4_alert",
+            "email_alert",
+            "text_alert",
+            "dashboard_alert",
+        ]
+
+    def should_create_alert(self, alert_template=None):
+        return (
+            self.dashboard_alert
+            or self.email_alert
+            or self.text_alert
+            or (
+                alert_template
+                and (
+                    alert_template.check_always_alert
+                    or alert_template.check_always_email
+                    or alert_template.check_always_text
+                )
+            )
+        )

     def add_check_history(self, value: int, more_info: Any = None) -> None:
-        CheckHistory.objects.create(check_history=self, y=value, results=more_info)
+        CheckHistory.objects.create(check_id=self.pk, y=value, results=more_info)

-    def handle_checkv2(self, data):
+    def handle_check(self, data):
+        from alerts.models import Alert
+
         # cpuload or mem checks
         if self.check_type == "cpuload" or self.check_type == "memory":
@@ -437,9 +346,6 @@ class Check(BaseAuditModel):
         elif self.check_type == "diskspace":
             if data["exists"]:
                 percent_used = round(data["percent_used"])
-                total = bytes2human(data["total"])
-                free = bytes2human(data["free"])

                 if self.error_threshold and (100 - percent_used) < self.error_threshold:
                     self.status = "failing"
                     self.alert_severity = "error"
@@ -453,7 +359,7 @@ class Check(BaseAuditModel):
                 else:
                     self.status = "passing"

-                self.more_info = f"Total: {total}B, Free: {free}B"
+                self.more_info = data["more_info"]

                 # add check history
                 self.add_check_history(100 - percent_used)
@@ -469,12 +375,7 @@ class Check(BaseAuditModel):
             self.stdout = data["stdout"]
             self.stderr = data["stderr"]
             self.retcode = data["retcode"]
-            try:
-                # python agent
-                self.execution_time = "{:.4f}".format(data["stop"] - data["start"])
-            except:
-                # golang agent
-                self.execution_time = "{:.4f}".format(data["runtime"])
+            self.execution_time = "{:.4f}".format(data["runtime"])

             if data["retcode"] in self.info_return_codes:
                 self.alert_severity = "info"
@@ -510,18 +411,8 @@ class Check(BaseAuditModel):

         # ping checks
         elif self.check_type == "ping":
-            success = ["Reply", "bytes", "time", "TTL"]
-            output = data["output"]
-
-            if data["has_stdout"]:
-                if all(x in output for x in success):
-                    self.status = "passing"
-                else:
-                    self.status = "failing"
-            elif data["has_stderr"]:
-                self.status = "failing"
-
-            self.more_info = output
+            self.status = data["status"]
+            self.more_info = data["output"]
             self.save(update_fields=["more_info"])

             self.add_check_history(
@@ -530,41 +421,8 @@ class Check(BaseAuditModel):

         # windows service checks
         elif self.check_type == "winsvc":
-            svc_stat = data["status"]
-            self.more_info = f"Status {svc_stat.upper()}"
-
-            if data["exists"]:
-                if svc_stat == "running":
-                    self.status = "passing"
-                elif svc_stat == "start_pending" and self.pass_if_start_pending:
-                    self.status = "passing"
-                else:
-                    if self.agent and self.restart_if_stopped:
-                        nats_data = {
-                            "func": "winsvcaction",
-                            "payload": {"name": self.svc_name, "action": "start"},
-                        }
-                        r = asyncio.run(self.agent.nats_cmd(nats_data, timeout=32))
-                        # ... (timeout / error / success handling of the restart
-                        # attempt set self.status and self.more_info here)
-                    else:
-                        self.status = "failing"
-            else:
-                if self.pass_if_svc_not_exist:
-                    self.status = "passing"
-                else:
-                    self.status = "failing"
-
-                self.more_info = f"Service {self.svc_name} does not exist"
-
+            self.status = data["status"]
+            self.more_info = data["more_info"]
             self.save(update_fields=["more_info"])

             self.add_check_history(
@@ -572,57 +430,15 @@ class Check(BaseAuditModel):
             )

         elif self.check_type == "eventlog":
-            log = []
-            is_wildcard = self.event_id_is_wildcard
-            eventType = self.event_type
-            eventID = self.event_id
-            source = self.event_source
-            message = self.event_message
-            r = data["log"]
-
-            for i in r:
-                if i["eventType"] == eventType:
-                    # ... (event ID / wildcard / source / message matching appended
-                    # matching entries to log here)
+            log = data["log"]

             if self.fail_when == "contains":
-                if log:
+                if log and len(log) >= self.number_of_events_b4_alert:
                     self.status = "failing"
                 else:
                     self.status = "passing"

             elif self.fail_when == "not_contains":
-                if log:
+                if log and len(log) >= self.number_of_events_b4_alert:
                     self.status = "passing"
                 else:
                     self.status = "failing"
@@ -640,14 +456,22 @@ class Check(BaseAuditModel):
             self.fail_count += 1
             self.save(update_fields=["status", "fail_count", "alert_severity"])

+            if self.fail_count >= self.fails_b4_alert:
+                Alert.handle_alert_failure(self)
+
         elif self.status == "passing":
             self.fail_count = 0
             self.save(update_fields=["status", "fail_count", "alert_severity"])
-            self.handle_alert()
+            if Alert.objects.filter(assigned_check=self, resolved=False).exists():
+                Alert.handle_alert_resolve(self)

         return self.status

+    def handle_assigned_task(self) -> None:
+        for task in self.assignedtask.all():  # type: ignore
+            if task.enabled:
+                task.run_win_task()
+
     @staticmethod
     def serialize(check):
         # serializes the check and returns json
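The tail of handle_check() now only does the fail-count bookkeeping and hands alert creation and resolution to the Alert model once the fails_b4_alert threshold is crossed. Restated as a standalone function; Alert.handle_alert_failure and Alert.handle_alert_resolve live in alerts/models.py and are not part of this excerpt:

```python
# Sketch of the status/alert hand-off shown above; not the model source.
def finish_check(check, Alert) -> str:
    if check.status == "failing":
        check.fail_count += 1
        check.save(update_fields=["status", "fail_count", "alert_severity"])
        if check.fail_count >= check.fails_b4_alert:
            Alert.handle_alert_failure(check)  # Alert decides email/sms/dashboard
    elif check.status == "passing":
        check.fail_count = 0
        check.save(update_fields=["status", "fail_count", "alert_severity"])
        if Alert.objects.filter(assigned_check=check, resolved=False).exists():
            Alert.handle_alert_resolve(check)
    return check.status
```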
@@ -672,47 +496,31 @@ class Check(BaseAuditModel):
|
|||||||
|
|
||||||
def create_policy_check(self, agent=None, policy=None):
|
def create_policy_check(self, agent=None, policy=None):
|
||||||
|
|
||||||
if not agent and not policy or agent and policy:
|
if (not agent and not policy) or (agent and policy):
|
||||||
return
|
return
|
||||||
|
|
||||||
Check.objects.create(
|
check = Check.objects.create(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
policy=policy,
|
policy=policy,
|
||||||
managed_by_policy=bool(agent),
|
managed_by_policy=bool(agent),
|
||||||
parent_check=(self.pk if agent else None),
|
parent_check=(self.pk if agent else None),
|
||||||
name=self.name,
|
|
||||||
alert_severity=self.alert_severity,
|
|
||||||
check_type=self.check_type,
|
check_type=self.check_type,
|
||||||
email_alert=self.email_alert,
|
|
||||||
dashboard_alert=self.dashboard_alert,
|
|
||||||
text_alert=self.text_alert,
|
|
||||||
fails_b4_alert=self.fails_b4_alert,
|
|
||||||
extra_details=self.extra_details,
|
|
||||||
error_threshold=self.error_threshold,
|
|
||||||
warning_threshold=self.warning_threshold,
|
|
||||||
disk=self.disk,
|
|
||||||
ip=self.ip,
|
|
||||||
script=self.script,
|
script=self.script,
|
||||||
script_args=self.script_args,
|
|
||||||
timeout=self.timeout,
|
|
||||||
info_return_codes=self.info_return_codes,
|
|
||||||
warning_return_codes=self.warning_return_codes,
|
|
||||||
svc_name=self.svc_name,
|
|
||||||
svc_display_name=self.svc_display_name,
|
|
||||||
pass_if_start_pending=self.pass_if_start_pending,
|
|
||||||
pass_if_svc_not_exist=self.pass_if_svc_not_exist,
|
|
||||||
restart_if_stopped=self.restart_if_stopped,
|
|
||||||
svc_policy_mode=self.svc_policy_mode,
|
|
||||||
log_name=self.log_name,
|
|
||||||
event_id=self.event_id,
|
|
||||||
event_id_is_wildcard=self.event_id_is_wildcard,
|
|
||||||
event_type=self.event_type,
|
|
||||||
event_source=self.event_source,
|
|
||||||
event_message=self.event_message,
|
|
||||||
fail_when=self.fail_when,
|
|
||||||
search_last_days=self.search_last_days,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
for task in self.assignedtask.all(): # type: ignore
|
||||||
|
if policy or (
|
||||||
|
agent and not agent.autotasks.filter(parent_task=task.pk).exists()
|
||||||
|
):
|
||||||
|
task.create_policy_task(
|
||||||
|
agent=agent, policy=policy, assigned_check=check
|
||||||
|
)
|
||||||
|
|
||||||
|
for field in self.policy_fields_to_copy:
|
||||||
|
setattr(check, field, getattr(self, field))
|
||||||
|
|
||||||
|
check.save()
|
||||||
|
|
||||||
def is_duplicate(self, check):
|
def is_duplicate(self, check):
|
||||||
if self.check_type == "diskspace":
|
if self.check_type == "diskspace":
|
||||||
return self.disk == check.disk
|
return self.disk == check.disk
|
||||||
@@ -738,11 +546,10 @@ class Check(BaseAuditModel):
|
|||||||
def send_email(self):
|
def send_email(self):
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = CoreSettings.objects.first()
|
||||||
alert_template = self.agent.get_alert_template()
|
|
||||||
|
|
||||||
body: str = ""
|
body: str = ""
|
||||||
if self.agent:
|
if self.agent:
|
||||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
|
||||||
else:
|
else:
|
||||||
subject = f"{self} Failed"
|
subject = f"{self} Failed"
|
||||||
|
|
||||||
@@ -753,12 +560,15 @@ class Check(BaseAuditModel):
|
|||||||
if self.error_threshold:
|
if self.error_threshold:
|
||||||
text += f" Error Threshold: {self.error_threshold}%"
|
text += f" Error Threshold: {self.error_threshold}%"
|
||||||
|
|
||||||
percent_used = [
|
try:
|
||||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
percent_used = [
|
||||||
][0]
|
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||||
percent_free = 100 - percent_used
|
][0]
|
||||||
|
percent_free = 100 - percent_used
|
||||||
|
|
||||||
body = subject + f" - Free: {percent_free}%, {text}"
|
body = subject + f" - Free: {percent_free}%, {text}"
|
||||||
|
except:
|
||||||
|
body = subject + f" - Disk {self.disk} does not exist"
|
||||||
|
|
||||||
elif self.check_type == "script":
|
elif self.check_type == "script":
|
||||||
|
|
||||||
@@ -787,16 +597,7 @@ class Check(BaseAuditModel):
|
|||||||
body = subject + f" - Average memory usage: {avg}%, {text}"
|
body = subject + f" - Average memory usage: {avg}%, {text}"
|
||||||
|
|
||||||
elif self.check_type == "winsvc":
|
elif self.check_type == "winsvc":
|
||||||
|
body = subject + f" - Status: {self.more_info}"
|
||||||
try:
|
|
||||||
status = list(
|
|
||||||
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
|
|
||||||
)[0]["status"]
|
|
||||||
# catch services that don't exist if policy check
|
|
||||||
except:
|
|
||||||
status = "Unknown"
|
|
||||||
|
|
||||||
body = subject + f" - Status: {status.upper()}"
|
|
||||||
|
|
||||||
elif self.check_type == "eventlog":
|
elif self.check_type == "eventlog":
|
||||||
|
|
||||||
@@ -820,12 +621,11 @@ class Check(BaseAuditModel):
                 except:
                     continue

-        CORE.send_mail(subject, body, alert_template=alert_template)
+        CORE.send_mail(subject, body, alert_template=self.agent.alert_template)

     def send_sms(self):

         CORE = CoreSettings.objects.first()
-        alert_template = self.agent.get_alert_template()
        body: str = ""

        if self.agent:
@@ -840,11 +640,15 @@ class Check(BaseAuditModel):
             if self.error_threshold:
                 text += f" Error Threshold: {self.error_threshold}%"

-            percent_used = [
-                d["percent"] for d in self.agent.disks if d["device"] == self.disk
-            ][0]
-            percent_free = 100 - percent_used
-            body = subject + f" - Free: {percent_free}%, {text}"
+            try:
+                percent_used = [
+                    d["percent"] for d in self.agent.disks if d["device"] == self.disk
+                ][0]
+                percent_free = 100 - percent_used
+                body = subject + f" - Free: {percent_free}%, {text}"
+            except:
+                body = subject + f" - Disk {self.disk} does not exist"

         elif self.check_type == "script":
             body = subject + f" - Return code: {self.retcode}"
         elif self.check_type == "ping":
@@ -862,39 +666,32 @@ class Check(BaseAuditModel):
         elif self.check_type == "memory":
             body = subject + f" - Average memory usage: {avg}%, {text}"
         elif self.check_type == "winsvc":
-            status = list(
-                filter(lambda x: x["name"] == self.svc_name, self.agent.services)
-            )[0]["status"]
-            body = subject + f" - Status: {status.upper()}"
+            body = subject + f" - Status: {self.more_info}"
         elif self.check_type == "eventlog":
             body = subject

-        CORE.send_sms(body, alert_template=alert_template)
+        CORE.send_sms(body, alert_template=self.agent.alert_template)

     def send_resolved_email(self):
         CORE = CoreSettings.objects.first()
-        alert_template = self.agent.get_alert_template()
         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
         body = f"{self} is now back to normal"

-        CORE.send_mail(subject, body, alert_template=alert_template)
+        CORE.send_mail(subject, body, alert_template=self.agent.alert_template)

     def send_resolved_sms(self):
         CORE = CoreSettings.objects.first()
-        alert_template = self.agent.get_alert_template()
         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
-        CORE.send_sms(subject, alert_template=alert_template)
+        CORE.send_sms(subject, alert_template=self.agent.alert_template)

 class CheckHistory(models.Model):
-    check_history = models.ForeignKey(
-        Check,
-        related_name="check_history",
-        on_delete=models.CASCADE,
-    )
+    check_id = models.PositiveIntegerField(default=0)
     x = models.DateTimeField(auto_now_add=True)
     y = models.PositiveIntegerField(null=True, blank=True, default=None)
     results = models.JSONField(null=True, blank=True)

     def __str__(self):
-        return self.check_history.readable_desc
+        return self.x
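The history model above now keys its rows on a plain check_id integer instead of a ForeignKey, so rows are written and read through explicit filters rather than a reverse relation. A minimal sketch of what that looks like with the models in this diff (illustrative only; the helper function names below are assumptions, not part of the change set):

# Sketch only: uses the Check and CheckHistory models shown above; the helper names are assumed.
from checks.models import Check, CheckHistory

def record_datapoint(check: Check, value: int) -> CheckHistory:
    # x (the timestamp) is auto_now_add, so only the stored id and the y value are supplied
    return CheckHistory.objects.create(check_id=check.pk, y=value)

def recent_history(check: Check, limit: int = 30):
    # history is looked up by the stored integer id rather than check.check_history.all()
    return CheckHistory.objects.filter(check_id=check.pk).order_by("-x")[:limit]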
api/tacticalrmm/checks/permissions.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+from rest_framework import permissions
+
+from tacticalrmm.permissions import _has_perm
+
+
+class ManageChecksPerms(permissions.BasePermission):
+    def has_permission(self, r, view):
+        if r.method == "GET":
+            return True
+
+        return _has_perm(r, "can_manage_checks")
+
+
+class RunChecksPerms(permissions.BasePermission):
+    def has_permission(self, r, view):
+        return _has_perm(r, "can_run_checks")
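The new permission classes lean on a _has_perm helper imported from tacticalrmm.permissions, which is not part of this diff. A minimal sketch of what such a helper typically looks like, assuming the user model exposes a role object with boolean flags (the attribute names here are assumptions, not the project's actual API):

# Hypothetical sketch only: _has_perm's real body is not shown in this compare view.
def _has_perm(request, perm: str) -> bool:
    # superusers bypass role checks entirely
    if request.user.is_superuser:
        return True

    # otherwise look the named flag up on the user's role, defaulting to denied
    role = getattr(request.user, "role", None)
    return bool(role and getattr(role, perm, False))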
@@ -6,6 +6,7 @@ from autotasks.models import AutomatedTask
 from scripts.serializers import ScriptCheckSerializer, ScriptSerializer

 from .models import Check, CheckHistory
+from scripts.models import Script


 class AssignedTaskField(serializers.ModelSerializer):
@@ -25,7 +26,7 @@ class CheckSerializer(serializers.ModelSerializer):

     def get_alert_template(self, obj):
         if obj.agent:
-            alert_template = obj.agent.get_alert_template()
+            alert_template = obj.agent.alert_template
         else:
             alert_template = None

@@ -158,13 +159,16 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):

 class CheckRunnerGetSerializer(serializers.ModelSerializer):
     # only send data needed for agent to run a check
-    assigned_tasks = serializers.SerializerMethodField()
     script = ScriptCheckSerializer(read_only=True)
+    script_args = serializers.SerializerMethodField()

-    def get_assigned_tasks(self, obj):
-        if obj.assignedtask.exists():
-            tasks = obj.assignedtask.all()
-            return AssignedTaskCheckRunnerField(tasks, many=True).data
+    def get_script_args(self, obj):
+        if obj.check_type != "script":
+            return []
+
+        return Script.parse_script_args(
+            agent=obj.agent, shell=obj.script.shell, args=obj.script_args
+        )

     class Meta:
         model = Check
@@ -193,6 +197,7 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer):
             "modified_by",
             "modified_time",
             "history",
+            "dashboard_alert",
         ]

@@ -14,6 +14,22 @@ class TestCheckViews(TacticalTestCase):
         self.authenticate()
         self.setup_coresettings()

+    def test_delete_agent_check(self):
+        # setup data
+        agent = baker.make_recipe("agents.agent")
+        check = baker.make_recipe("checks.diskspace_check", agent=agent)
+
+        resp = self.client.delete("/checks/500/check/", format="json")
+        self.assertEqual(resp.status_code, 404)
+
+        url = f"/checks/{check.pk}/check/"
+
+        resp = self.client.delete(url, format="json")
+        self.assertEqual(resp.status_code, 200)
+        self.assertFalse(agent.agentchecks.all())
+
+        self.check_not_authenticated("delete", url)
+
     def test_get_disk_check(self):
         # setup data
         disk_check = baker.make_recipe("checks.diskspace_check")
@@ -24,7 +40,7 @@ class TestCheckViews(TacticalTestCase):
         serializer = CheckSerializer(disk_check)

         self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.data, serializer.data)
+        self.assertEqual(resp.data, serializer.data)  # type: ignore
         self.check_not_authenticated("get", url)

     def test_add_disk_check(self):
@@ -211,7 +227,7 @@ class TestCheckViews(TacticalTestCase):
         serializer = CheckSerializer(disk_check)

         self.assertEqual(resp.status_code, 200)
-        self.assertEqual(resp.data, serializer.data)
+        self.assertEqual(resp.data, serializer.data)  # type: ignore
         self.check_not_authenticated("post", url)

     def test_add_policy_disk_check(self):
@@ -221,7 +237,7 @@ class TestCheckViews(TacticalTestCase):
         url = "/checks/checks/"

         valid_payload = {
-            "policy": policy.pk,
+            "policy": policy.pk,  # type: ignore
             "check": {
                 "check_type": "diskspace",
                 "disk": "M:",
@@ -233,7 +249,7 @@ class TestCheckViews(TacticalTestCase):

         # should fail because both error and warning thresholds are 0
         invalid_payload = {
-            "policy": policy.pk,
+            "policy": policy.pk,  # type: ignore
             "check": {
                 "check_type": "diskspace",
                 "error_threshold": 0,
@@ -247,7 +263,7 @@ class TestCheckViews(TacticalTestCase):

         # should fail because warning is less than error
         invalid_payload = {
-            "policy": policy.pk,
+            "policy": policy.pk,  # type: ignore
             "check": {
                 "check_type": "diskspace",
                 "error_threshold": 80,
@@ -261,7 +277,7 @@ class TestCheckViews(TacticalTestCase):

         # this should fail because we already have a check for drive M: in setup
         invalid_payload = {
-            "policy": policy.pk,
+            "policy": policy.pk,  # type: ignore
             "check": {
                 "check_type": "diskspace",
                 "disk": "M:",
@@ -277,8 +293,8 @@ class TestCheckViews(TacticalTestCase):
     def test_get_disks_for_policies(self):
         url = "/checks/getalldisks/"
         r = self.client.get(url)
-        self.assertIsInstance(r.data, list)
-        self.assertEqual(26, len(r.data))
+        self.assertIsInstance(r.data, list)  # type: ignore
+        self.assertEqual(26, len(r.data))  # type: ignore

     def test_edit_check_alert(self):
         # setup data
@@ -310,14 +326,8 @@ class TestCheckViews(TacticalTestCase):
     @patch("agents.models.Agent.nats_cmd")
     def test_run_checks(self, nats_cmd):
         agent = baker.make_recipe("agents.agent", version="1.4.1")
-        agent_old = baker.make_recipe("agents.agent", version="1.0.2")
         agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0")

-        url = f"/checks/runchecks/{agent_old.pk}/"
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater")
-
         url = f"/checks/runchecks/{agent_b4_141.pk}/"
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
@@ -353,16 +363,16 @@ class TestCheckViews(TacticalTestCase):
         # setup data
         agent = baker.make_recipe("agents.agent")
         check = baker.make_recipe("checks.diskspace_check", agent=agent)
-        baker.make("checks.CheckHistory", check_history=check, _quantity=30)
+        baker.make("checks.CheckHistory", check_id=check.id, _quantity=30)
         check_history_data = baker.make(
             "checks.CheckHistory",
-            check_history=check,
+            check_id=check.id,
             _quantity=30,
         )

         # need to manually set the date back 35 days
-        for check_history in check_history_data:
-            check_history.x = djangotime.now() - djangotime.timedelta(days=35)
+        for check_history in check_history_data:  # type: ignore
+            check_history.x = djangotime.now() - djangotime.timedelta(days=35)  # type: ignore
             check_history.save()

         # test invalid check pk
@@ -375,36 +385,38 @@ class TestCheckViews(TacticalTestCase):
         data = {"timeFilter": 30}
         resp = self.client.patch(url, data, format="json")
         self.assertEqual(resp.status_code, 200)
-        self.assertEqual(len(resp.data), 30)
+        self.assertEqual(len(resp.data), 30)  # type: ignore

         # test with timeFilter equal to 0
         data = {"timeFilter": 0}
         resp = self.client.patch(url, data, format="json")
         self.assertEqual(resp.status_code, 200)
-        self.assertEqual(len(resp.data), 60)
+        self.assertEqual(len(resp.data), 60)  # type: ignore

         self.check_not_authenticated("patch", url)


 class TestCheckTasks(TacticalTestCase):
     def setUp(self):
+        self.authenticate()
         self.setup_coresettings()
+        self.agent = baker.make_recipe("agents.agent", version="1.5.7")

     def test_prune_check_history(self):
         from .tasks import prune_check_history

         # setup data
         check = baker.make_recipe("checks.diskspace_check")
-        baker.make("checks.CheckHistory", check_history=check, _quantity=30)
+        baker.make("checks.CheckHistory", check_id=check.id, _quantity=30)
         check_history_data = baker.make(
             "checks.CheckHistory",
-            check_history=check,
+            check_id=check.id,
             _quantity=30,
         )

         # need to manually set the date back 35 days
-        for check_history in check_history_data:
-            check_history.x = djangotime.now() - djangotime.timedelta(days=35)
+        for check_history in check_history_data:  # type: ignore
+            check_history.x = djangotime.now() - djangotime.timedelta(days=35)  # type: ignore
             check_history.save()

         # prune data 30 days old
@@ -414,3 +426,720 @@ class TestCheckTasks(TacticalTestCase):
         # prune all Check history Data
         prune_check_history(0)
         self.assertEqual(CheckHistory.objects.count(), 0)
+
+    def test_handle_script_check(self):
+        from checks.models import Check
+
+        url = "/api/v3/checkrunner/"
+
+        script = baker.make_recipe("checks.script_check", agent=self.agent)
+
+        # test failing
+        data = {
+            "id": script.id,
+            "retcode": 500,
+            "stderr": "error",
+            "stdout": "message",
+            "runtime": 5.000,
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=script.id)
+
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test passing
+        data = {
+            "id": script.id,
+            "retcode": 0,
+            "stderr": "error",
+            "stdout": "message",
+            "runtime": 5.000,
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=script.id)
+
+        self.assertEqual(new_check.status, "passing")
+
+        # test failing info
+        script.info_return_codes = [20, 30, 50]
+        script.save()
+
+        data = {
+            "id": script.id,
+            "retcode": 30,
+            "stderr": "error",
+            "stdout": "message",
+            "runtime": 5.000,
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=script.id)
+
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "info")
+
+        # test failing warning
+        script.warning_return_codes = [80, 100, 1040]
+        script.save()
+
+        data = {
+            "id": script.id,
+            "retcode": 1040,
+            "stderr": "error",
+            "stdout": "message",
+            "runtime": 5.000,
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=script.id)
+
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+    def test_handle_diskspace_check(self):
+        from checks.models import Check
+
+        url = "/api/v3/checkrunner/"
+
+        diskspace = baker.make_recipe(
+            "checks.diskspace_check",
+            warning_threshold=20,
+            error_threshold=10,
+            agent=self.agent,
+        )
+
+        # test warning threshold failure
+        data = {
+            "id": diskspace.id,
+            "exists": True,
+            "percent_used": 85,
+            "total": 500,
+            "free": 400,
+            "more_info": "More info",
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=diskspace.id)
+
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+        # test error failure
+        data = {
+            "id": diskspace.id,
+            "exists": True,
+            "percent_used": 95,
+            "total": 500,
+            "free": 400,
+            "more_info": "More info",
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=diskspace.id)
+
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test disk not exist
+        data = {"id": diskspace.id, "exists": False}
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=diskspace.id)
+
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test warning threshold 0
+        diskspace.warning_threshold = 0
+        diskspace.save()
+        data = {
+            "id": diskspace.id,
+            "exists": True,
+            "percent_used": 95,
+            "total": 500,
+            "free": 400,
+            "more_info": "More info",
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=diskspace.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test error threshold 0
+        diskspace.warning_threshold = 50
+        diskspace.error_threshold = 0
+        diskspace.save()
+        data = {
+            "id": diskspace.id,
+            "exists": True,
+            "percent_used": 95,
+            "total": 500,
+            "free": 400,
+            "more_info": "More info",
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=diskspace.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+        # test passing
+        data = {
+            "id": diskspace.id,
+            "exists": True,
+            "percent_used": 50,
+            "total": 500,
+            "free": 400,
+            "more_info": "More info",
+        }
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=diskspace.id)
+
+        self.assertEqual(new_check.status, "passing")
+
+    def test_handle_cpuload_check(self):
+        from checks.models import Check
+
+        url = "/api/v3/checkrunner/"
+
+        cpuload = baker.make_recipe(
+            "checks.cpuload_check",
+            warning_threshold=70,
+            error_threshold=90,
+            agent=self.agent,
+        )
+
+        # test failing warning
+        data = {"id": cpuload.id, "percent": 80}
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=cpuload.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+        # test failing error
+        data = {"id": cpuload.id, "percent": 95}
+
+        # reset check history
+        cpuload.history = []
+        cpuload.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=cpuload.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test passing
+        data = {"id": cpuload.id, "percent": 50}
+
+        # reset check history
+        cpuload.history = []
+        cpuload.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=cpuload.id)
+        self.assertEqual(new_check.status, "passing")
+
+        # test warning threshold 0
+        cpuload.warning_threshold = 0
+        cpuload.save()
+        data = {"id": cpuload.id, "percent": 95}
+
+        # reset check history
+        cpuload.history = []
+        cpuload.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=cpuload.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test error threshold 0
+        cpuload.warning_threshold = 50
+        cpuload.error_threshold = 0
+        cpuload.save()
+        data = {"id": cpuload.id, "percent": 95}
+
+        # reset check history
+        cpuload.history = []
+        cpuload.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=cpuload.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+    def test_handle_memory_check(self):
+        from checks.models import Check
+
+        url = "/api/v3/checkrunner/"
+
+        memory = baker.make_recipe(
+            "checks.memory_check",
+            warning_threshold=70,
+            error_threshold=90,
+            agent=self.agent,
+        )
+
+        # test failing warning
+        data = {"id": memory.id, "percent": 80}
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=memory.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+        # test failing error
+        data = {"id": memory.id, "percent": 95}
+
+        # reset check history
+        memory.history = []
+        memory.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=memory.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test passing
+        data = {"id": memory.id, "percent": 50}
+
+        # reset check history
+        memory.history = []
+        memory.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=memory.id)
+        self.assertEqual(new_check.status, "passing")
+
+        # test warning threshold 0
+        memory.warning_threshold = 0
+        memory.save()
+        data = {"id": memory.id, "percent": 95}
+
+        # reset check history
+        memory.history = []
+        memory.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=memory.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test error threshold 0
+        memory.warning_threshold = 50
+        memory.error_threshold = 0
+        memory.save()
+        data = {"id": memory.id, "percent": 95}
+
+        # reset check history
+        memory.history = []
+        memory.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=memory.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+    def test_handle_ping_check(self):
+        from checks.models import Check
+
+        url = "/api/v3/checkrunner/"
+
+        ping = baker.make_recipe(
+            "checks.ping_check", agent=self.agent, alert_severity="info"
+        )
+
+        # test failing info
+        data = {"id": ping.id, "status": "failing", "output": "reply from a.com"}
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=ping.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "info")
+
+        # test failing warning
+        ping.alert_severity = "warning"
+        ping.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=ping.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "warning")
+
+        # test failing error
+        ping.alert_severity = "error"
+        ping.save()
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=ping.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test failing error
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=ping.id)
+        self.assertEqual(new_check.status, "failing")
+        self.assertEqual(new_check.alert_severity, "error")
+
+        # test passing
+        data = {"id": ping.id, "status": "passing", "output": "reply from a.com"}
+
+        resp = self.client.patch(url, data, format="json")
+        self.assertEqual(resp.status_code, 200)
+
+        new_check = Check.objects.get(pk=ping.id)
+        self.assertEqual(new_check.status, "passing")
+
@patch("agents.models.Agent.nats_cmd")
|
||||||
|
def test_handle_winsvc_check(self, nats_cmd):
|
||||||
|
from checks.models import Check
|
||||||
|
|
||||||
|
url = "/api/v3/checkrunner/"
|
||||||
|
|
||||||
|
winsvc = baker.make_recipe(
|
||||||
|
"checks.winsvc_check", agent=self.agent, alert_severity="info"
|
||||||
|
)
|
||||||
|
|
||||||
|
# test passing running
|
||||||
|
data = {"id": winsvc.id, "status": "passing", "more_info": "ok"}
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=winsvc.id)
|
||||||
|
self.assertEqual(new_check.status, "passing")
|
||||||
|
|
||||||
|
# test failing
|
||||||
|
data = {"id": winsvc.id, "status": "failing", "more_info": "ok"}
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=winsvc.id)
|
||||||
|
self.assertEqual(new_check.status, "failing")
|
||||||
|
self.assertEqual(new_check.alert_severity, "info")
|
||||||
|
|
||||||
|
""" # test failing and attempt start
|
||||||
|
winsvc.restart_if_stopped = True
|
||||||
|
winsvc.alert_severity = "warning"
|
||||||
|
winsvc.save()
|
||||||
|
|
||||||
|
nats_cmd.return_value = "timeout"
|
||||||
|
|
||||||
|
data = {"id": winsvc.id, "exists": True, "status": "not running"}
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=winsvc.id)
|
||||||
|
self.assertEqual(new_check.status, "failing")
|
||||||
|
self.assertEqual(new_check.alert_severity, "warning")
|
||||||
|
nats_cmd.assert_called()
|
||||||
|
nats_cmd.reset_mock()
|
||||||
|
|
||||||
|
# test failing and attempt start
|
||||||
|
winsvc.alert_severity = "error"
|
||||||
|
winsvc.save()
|
||||||
|
nats_cmd.return_value = {"success": False, "errormsg": "Some Error"}
|
||||||
|
|
||||||
|
data = {"id": winsvc.id, "exists": True, "status": "not running"}
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=winsvc.id)
|
||||||
|
self.assertEqual(new_check.status, "failing")
|
||||||
|
self.assertEqual(new_check.alert_severity, "error")
|
||||||
|
nats_cmd.assert_called()
|
||||||
|
nats_cmd.reset_mock()
|
||||||
|
|
||||||
|
# test success and attempt start
|
||||||
|
nats_cmd.return_value = {"success": True}
|
||||||
|
|
||||||
|
data = {"id": winsvc.id, "exists": True, "status": "not running"}
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=winsvc.id)
|
||||||
|
self.assertEqual(new_check.status, "passing")
|
||||||
|
nats_cmd.assert_called()
|
||||||
|
nats_cmd.reset_mock()
|
||||||
|
|
||||||
|
# test failing and service not exist
|
||||||
|
data = {"id": winsvc.id, "exists": False, "status": ""}
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=winsvc.id)
|
||||||
|
self.assertEqual(new_check.status, "failing")
|
||||||
|
|
||||||
|
# test success and service not exist
|
||||||
|
winsvc.pass_if_svc_not_exist = True
|
||||||
|
winsvc.save()
|
||||||
|
data = {"id": winsvc.id, "exists": False, "status": ""}
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=winsvc.id)
|
||||||
|
self.assertEqual(new_check.status, "passing") """
|
||||||
|
|
||||||
|
""" def test_handle_eventlog_check(self):
|
||||||
|
from checks.models import Check
|
||||||
|
|
||||||
|
url = "/api/v3/checkrunner/"
|
||||||
|
|
||||||
|
eventlog = baker.make_recipe(
|
||||||
|
"checks.eventlog_check",
|
||||||
|
event_type="warning",
|
||||||
|
fail_when="contains",
|
||||||
|
event_id=123,
|
||||||
|
alert_severity="warning",
|
||||||
|
agent=self.agent,
|
||||||
|
)
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"id": eventlog.id,
|
||||||
|
"log": [
|
||||||
|
{
|
||||||
|
"eventType": "warning",
|
||||||
|
"eventID": 150,
|
||||||
|
"source": "source",
|
||||||
|
"message": "a test message",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"eventType": "warning",
|
||||||
|
"eventID": 123,
|
||||||
|
"source": "source",
|
||||||
|
"message": "a test message",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"eventType": "error",
|
||||||
|
"eventID": 123,
|
||||||
|
"source": "source",
|
||||||
|
"message": "a test message",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"eventType": "error",
|
||||||
|
"eventID": 123,
|
||||||
|
"source": "source",
|
||||||
|
"message": "a test message",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
# test failing when contains
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.alert_severity, "warning")
|
||||||
|
self.assertEquals(new_check.status, "failing")
|
||||||
|
|
||||||
|
# test passing when not contains and message
|
||||||
|
eventlog.event_message = "doesnt exist"
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "passing")
|
||||||
|
|
||||||
|
# test failing when not contains and message and source
|
||||||
|
eventlog.fail_when = "not_contains"
|
||||||
|
eventlog.alert_severity = "error"
|
||||||
|
eventlog.event_message = "doesnt exist"
|
||||||
|
eventlog.event_source = "doesnt exist"
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "failing")
|
||||||
|
self.assertEquals(new_check.alert_severity, "error")
|
||||||
|
|
||||||
|
# test passing when contains with source and message
|
||||||
|
eventlog.event_message = "test"
|
||||||
|
eventlog.event_source = "source"
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "passing")
|
||||||
|
|
||||||
|
# test failing with wildcard not contains and source
|
||||||
|
eventlog.event_id_is_wildcard = True
|
||||||
|
eventlog.event_source = "doesn't exist"
|
||||||
|
eventlog.event_message = ""
|
||||||
|
eventlog.event_id = 0
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "failing")
|
||||||
|
self.assertEquals(new_check.alert_severity, "error")
|
||||||
|
|
||||||
|
# test passing with wildcard contains
|
||||||
|
eventlog.event_source = ""
|
||||||
|
eventlog.event_message = ""
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "passing")
|
||||||
|
|
||||||
|
# test failing with wildcard contains and message
|
||||||
|
eventlog.fail_when = "contains"
|
||||||
|
eventlog.event_type = "error"
|
||||||
|
eventlog.alert_severity = "info"
|
||||||
|
eventlog.event_message = "test"
|
||||||
|
eventlog.event_source = ""
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "failing")
|
||||||
|
self.assertEquals(new_check.alert_severity, "info")
|
||||||
|
|
||||||
|
# test passing with wildcard not contains message and source
|
||||||
|
eventlog.event_message = "doesnt exist"
|
||||||
|
eventlog.event_source = "doesnt exist"
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "passing")
|
||||||
|
|
||||||
|
# test multiple events found and contains
|
||||||
|
# this should pass since only two events are found
|
||||||
|
eventlog.number_of_events_b4_alert = 3
|
||||||
|
eventlog.event_id_is_wildcard = False
|
||||||
|
eventlog.event_source = None
|
||||||
|
eventlog.event_message = None
|
||||||
|
eventlog.event_id = 123
|
||||||
|
eventlog.event_type = "error"
|
||||||
|
eventlog.fail_when = "contains"
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "passing")
|
||||||
|
|
||||||
|
# this should pass since there are two events returned
|
||||||
|
eventlog.number_of_events_b4_alert = 2
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "failing")
|
||||||
|
|
||||||
|
# test not contains
|
||||||
|
# this should fail since only two events are found
|
||||||
|
eventlog.number_of_events_b4_alert = 3
|
||||||
|
eventlog.event_id_is_wildcard = False
|
||||||
|
eventlog.event_source = None
|
||||||
|
eventlog.event_message = None
|
||||||
|
eventlog.event_id = 123
|
||||||
|
eventlog.event_type = "error"
|
||||||
|
eventlog.fail_when = "not_contains"
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "failing")
|
||||||
|
|
||||||
|
# this should pass since there are two events returned
|
||||||
|
eventlog.number_of_events_b4_alert = 2
|
||||||
|
eventlog.save()
|
||||||
|
|
||||||
|
resp = self.client.patch(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
new_check = Check.objects.get(pk=eventlog.id)
|
||||||
|
|
||||||
|
self.assertEquals(new_check.status, "passing") """
|
||||||
|
|||||||
@@ -8,5 +8,5 @@ urlpatterns = [
     path("<pk>/loadchecks/", views.load_checks),
     path("getalldisks/", views.get_disks_for_policies),
     path("runchecks/<pk>/", views.run_checks),
-    path("history/<int:checkpk>/", views.CheckHistory.as_view()),
+    path("history/<int:checkpk>/", views.GetCheckHistory.as_view()),
 ]
@@ -5,26 +5,27 @@ from django.db.models import Q
 from django.shortcuts import get_object_or_404
 from django.utils import timezone as djangotime
 from packaging import version as pyver
-from rest_framework.decorators import api_view
+from rest_framework.decorators import api_view, permission_classes
+from rest_framework.permissions import IsAuthenticated
 from rest_framework.response import Response
 from rest_framework.views import APIView

 from agents.models import Agent
 from automation.models import Policy
-from automation.tasks import (
-    delete_policy_check_task,
-    generate_agent_checks_from_policies_task,
-    update_policy_check_fields_task,
-)
 from scripts.models import Script
 from tacticalrmm.utils import notify_error

-from .models import Check
+from .models import Check, CheckHistory
+from .permissions import ManageChecksPerms, RunChecksPerms
 from .serializers import CheckHistorySerializer, CheckSerializer


 class AddCheck(APIView):
+    permission_classes = [IsAuthenticated, ManageChecksPerms]
+
     def post(self, request):
+        from automation.tasks import generate_agent_checks_task
+
         policy = None
         agent = None

@@ -53,40 +54,49 @@ class AddCheck(APIView):
             data=request.data["check"], partial=True, context=parent
         )
         serializer.is_valid(raise_exception=True)
-        obj = serializer.save(**parent, script=script)
+        new_check = serializer.save(**parent, script=script)

         # Generate policy Checks
         if policy:
-            generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
+            generate_agent_checks_task.delay(policy=policy.pk)
         elif agent:
-            checks = agent.agentchecks.filter(
-                check_type=obj.check_type, managed_by_policy=True
+            checks = agent.agentchecks.filter(  # type: ignore
+                check_type=new_check.check_type, managed_by_policy=True
             )

             # Should only be one
-            duplicate_check = [check for check in checks if check.is_duplicate(obj)]
+            duplicate_check = [
+                check for check in checks if check.is_duplicate(new_check)
+            ]

             if duplicate_check:
                 policy = Check.objects.get(pk=duplicate_check[0].parent_check).policy
                 if policy.enforced:
-                    obj.overriden_by_policy = True
-                    obj.save()
+                    new_check.overriden_by_policy = True
+                    new_check.save()
                 else:
                     duplicate_check[0].delete()

-        return Response(f"{obj.readable_desc} was added!")
+        return Response(f"{new_check.readable_desc} was added!")


 class GetUpdateDeleteCheck(APIView):
+    permission_classes = [IsAuthenticated, ManageChecksPerms]
+
     def get(self, request, pk):
         check = get_object_or_404(Check, pk=pk)
         return Response(CheckSerializer(check).data)

     def patch(self, request, pk):
+        from automation.tasks import update_policy_check_fields_task
+
         check = get_object_or_404(Check, pk=pk)

         # remove fields that should not be changed when editing a check from the frontend
-        if "check_alert" not in request.data.keys():
+        if (
+            "check_alert" not in request.data.keys()
+            and "check_reset" not in request.data.keys()
+        ):
             [request.data.pop(i) for i in check.non_editable_fields]

         # set event id to 0 if wildcard because it needs to be an integer field for db
@@ -102,31 +112,32 @@ class GetUpdateDeleteCheck(APIView):

         serializer = CheckSerializer(instance=check, data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
-        obj = serializer.save()
+        check = serializer.save()

+        # resolve any alerts that are open
+        if "check_reset" in request.data.keys():
+            if check.alert.filter(resolved=False).exists():
+                check.alert.get(resolved=False).resolve()
+
-        # Update policy check fields
         if check.policy:
-            update_policy_check_fields_task(checkpk=pk)
+            update_policy_check_fields_task.delay(check=check.pk)

-        return Response(f"{obj.readable_desc} was edited!")
+        return Response(f"{check.readable_desc} was edited!")

     def delete(self, request, pk):
-        check = get_object_or_404(Check, pk=pk)
+        from automation.tasks import generate_agent_checks_task

-        check_pk = check.pk
-        policy_pk = None
-        if check.policy:
-            policy_pk = check.policy.pk
+        check = get_object_or_404(Check, pk=pk)

         check.delete()

         # Policy check deleted
         if check.policy:
-            delete_policy_check_task.delay(checkpk=check_pk)
+            Check.objects.filter(managed_by_policy=True, parent_check=pk).delete()

             # Re-evaluate agent checks is policy was enforced
             if check.policy.enforced:
-                generate_agent_checks_from_policies_task.delay(policypk=policy_pk)
+                generate_agent_checks_task.delay(policy=check.policy)

         # Agent check deleted
         elif check.agent:
@@ -135,7 +146,7 @@ class GetUpdateDeleteCheck(APIView):
         return Response(f"{check.readable_desc} was deleted!")


-class CheckHistory(APIView):
+class GetCheckHistory(APIView):
     def patch(self, request, checkpk):
         check = get_object_or_404(Check, pk=checkpk)

@@ -149,7 +160,7 @@ class CheckHistory(APIView):
             - djangotime.timedelta(days=request.data["timeFilter"]),
         )

-        check_history = check.check_history.filter(timeFilter).order_by("-x")
+        check_history = CheckHistory.objects.filter(check_id=checkpk).filter(timeFilter).order_by("-x")  # type: ignore

         return Response(
             CheckHistorySerializer(
@@ -159,10 +170,9 @@ class CheckHistory(APIView):


 @api_view()
+@permission_classes([IsAuthenticated, RunChecksPerms])
 def run_checks(request, pk):
     agent = get_object_or_404(Agent, pk=pk)
-    if not agent.has_nats:
-        return notify_error("Requires agent version 1.1.0 or greater")

     if pyver.parse(agent.version) >= pyver.parse("1.4.1"):
         r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15))
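The history view above only shows the tail of the time filter it applies; how the Q object is assembled is not part of this diff. A minimal sketch of one plausible construction, assuming timeFilter of 0 means "no limit" as the tests imply (the function name and Q shape are assumptions):

# Illustrative sketch only; not the project's actual implementation.
from django.db.models import Q
from django.utils import timezone as djangotime

def history_time_filter(days: int) -> Q:
    # the tests treat 0 as "return everything", so only build a cutoff for positive values
    if not days:
        return Q()
    cutoff = djangotime.now() - djangotime.timedelta(days=days)
    return Q(x__gte=cutoff)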
@@ -1,7 +1,9 @@
 from django.contrib import admin

-from .models import Client, Deployment, Site
+from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField

 admin.site.register(Client)
 admin.site.register(Site)
 admin.site.register(Deployment)
+admin.site.register(ClientCustomField)
+admin.site.register(SiteCustomField)
@@ -0,0 +1,33 @@
|
|||||||
|
# Generated by Django 3.1.7 on 2021-03-17 14:45
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('core', '0014_customfield'),
|
||||||
|
('clients', '0009_auto_20210212_1408'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='SiteCustomField',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('value', models.TextField(blank=True, null=True)),
|
||||||
|
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_fields', to='core.customfield')),
|
||||||
|
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='clients.site')),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='ClientCustomField',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('value', models.TextField(blank=True, null=True)),
|
||||||
|
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='clients.client')),
|
||||||
|
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='client_fields', to='core.customfield')),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,17 @@
|
|||||||
|
# Generated by Django 3.1.7 on 2021-03-21 15:11
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('clients', '0010_clientcustomfield_sitecustomfield'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='site',
|
||||||
|
unique_together={('client', 'name')},
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.1.7 on 2021-03-26 06:52
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('clients', '0011_auto_20210321_1511'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='deployment',
|
||||||
|
name='created',
|
||||||
|
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
# Generated by Django 3.1.7 on 2021-03-29 02:51
|
||||||
|
|
||||||
|
import django.contrib.postgres.fields
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('clients', '0012_deployment_created'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='clientcustomfield',
|
||||||
|
name='multiple_value',
|
||||||
|
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='sitecustomfield',
|
||||||
|
name='multiple_value',
|
||||||
|
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.1.7 on 2021-03-29 03:01
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('clients', '0013_auto_20210329_0251'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='clientcustomfield',
|
||||||
|
name='checkbox_value',
|
||||||
|
field=models.BooleanField(blank=True, default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='sitecustomfield',
|
||||||
|
name='checkbox_value',
|
||||||
|
field=models.BooleanField(blank=True, default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,27 @@
+# Generated by Django 3.1.7 on 2021-03-29 17:09
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('clients', '0014_auto_20210329_0301'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='clientcustomfield',
+            old_name='checkbox_value',
+            new_name='bool_value',
+        ),
+        migrations.RenameField(
+            model_name='clientcustomfield',
+            old_name='value',
+            new_name='string_value',
+        ),
+        migrations.RemoveField(
+            model_name='sitecustomfield',
+            name='value',
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 3.1.7 on 2021-03-29 18:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('clients', '0015_auto_20210329_1709'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='sitecustomfield',
+            old_name='checkbox_value',
+            new_name='bool_value',
+        ),
+        migrations.AddField(
+            model_name='sitecustomfield',
+            name='string_value',
+            field=models.TextField(blank=True, null=True),
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 3.1.7 on 2021-04-17 01:25
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('clients', '0016_auto_20210329_1827'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='client',
+            name='block_policy_inheritance',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='site',
+            name='block_policy_inheritance',
+            field=models.BooleanField(default=False),
+        ),
+    ]
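Note: migrations 0013 through 0016 leave each custom-field row with three typed columns (string_value, bool_value, multiple_value) and no data migration. If carrying existing free-text values into the new boolean column had been needed, a RunPython step between 0014 and 0015 could have done it. This is a hypothetical sketch, not part of this changeset; only the model and column names come from the migrations above.

# Hypothetical data migration sketch (not included in this changeset).
from django.db import migrations


def backfill_checkbox(apps, schema_editor):
    # historical model: has both the old "value" text column and the new "checkbox_value"
    ClientCustomField = apps.get_model("clients", "ClientCustomField")
    for cf in ClientCustomField.objects.all():
        # interpret the old free-text value as a boolean
        cf.checkbox_value = str(cf.value).strip().lower() in ("1", "true", "yes")
        cf.save(update_fields=["checkbox_value"])


class Migration(migrations.Migration):

    dependencies = [
        ("clients", "0014_auto_20210329_0301"),
    ]

    operations = [
        migrations.RunPython(backfill_checkbox, migrations.RunPython.noop),
    ]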
@@ -1,5 +1,6 @@
 import uuid
 
+from django.contrib.postgres.fields import ArrayField
 from django.db import models
 
 from agents.models import Agent
@@ -8,6 +9,7 @@ from logs.models import BaseAuditModel
 
 class Client(BaseAuditModel):
     name = models.CharField(max_length=255, unique=True)
+    block_policy_inheritance = models.BooleanField(default=False)
     workstation_policy = models.ForeignKey(
         "automation.Policy",
         related_name="workstation_clients",
@@ -32,27 +34,30 @@ class Client(BaseAuditModel):
     )
 
     def save(self, *args, **kw):
-        from automation.tasks import generate_agent_checks_by_location_task
+        from alerts.tasks import cache_agents_alert_template
+        from automation.tasks import generate_agent_checks_task
 
         # get old client if exists
         old_client = type(self).objects.get(pk=self.pk) if self.pk else None
         super(BaseAuditModel, self).save(*args, **kw)
 
-        # check if server polcies have changed and initiate task to reapply policies if so
-        if old_client and old_client.server_policy != self.server_policy:
-            generate_agent_checks_by_location_task.delay(
-                location={"site__client_id": self.pk},
-                mon_type="server",
-                create_tasks=True,
-            )
-
-        # check if workstation polcies have changed and initiate task to reapply policies if so
-        if old_client and old_client.workstation_policy != self.workstation_policy:
-            generate_agent_checks_by_location_task.delay(
-                location={"site__client_id": self.pk},
-                mon_type="workstation",
-                create_tasks=True,
-            )
+        # check if polcies have changed and initiate task to reapply policies if so
+        if old_client:
+            if (
+                (old_client.server_policy != self.server_policy)
+                or (old_client.workstation_policy != self.workstation_policy)
+                or (
+                    old_client.block_policy_inheritance != self.block_policy_inheritance
+                )
+            ):
+
+                generate_agent_checks_task.delay(
+                    client=self.pk,
+                    create_tasks=True,
+                )
+
+            if old_client.alert_template != self.alert_template:
+                cache_agents_alert_template.delay()
 
     class Meta:
         ordering = ("name",)
@@ -60,6 +65,10 @@ class Client(BaseAuditModel):
     def __str__(self):
         return self.name
 
+    @property
+    def agent_count(self) -> int:
+        return Agent.objects.filter(site__client=self).count()
+
     @property
     def has_maintenanace_mode_agents(self):
         return (
@@ -81,16 +90,24 @@ class Client(BaseAuditModel):
             .prefetch_related("agentchecks")
         )
 
-        failing = 0
+        data = {"error": False, "warning": False}
+
         for agent in agents:
             if agent.checks["has_failing_checks"]:
-                failing += 1
+
+                if agent.checks["warning"]:
+                    data["warning"] = True
+
+                if agent.checks["failing"]:
+                    data["error"] = True
+                    break
 
             if agent.overdue_email_alert or agent.overdue_text_alert:
                 if agent.status == "overdue":
-                    failing += 1
+                    data["error"] = True
+                    break
 
-        return failing > 0
+        return data
 
     @staticmethod
     def serialize(client):
@@ -103,6 +120,7 @@ class Client(BaseAuditModel):
 class Site(BaseAuditModel):
     client = models.ForeignKey(Client, related_name="sites", on_delete=models.CASCADE)
     name = models.CharField(max_length=255)
+    block_policy_inheritance = models.BooleanField(default=False)
     workstation_policy = models.ForeignKey(
         "automation.Policy",
         related_name="workstation_sites",
@@ -127,34 +145,37 @@ class Site(BaseAuditModel):
     )
 
     def save(self, *args, **kw):
-        from automation.tasks import generate_agent_checks_by_location_task
+        from alerts.tasks import cache_agents_alert_template
+        from automation.tasks import generate_agent_checks_task
 
         # get old client if exists
        old_site = type(self).objects.get(pk=self.pk) if self.pk else None
        super(Site, self).save(*args, **kw)
 
-        # check if server polcies have changed and initiate task to reapply policies if so
-        if old_site and old_site.server_policy != self.server_policy:
-            generate_agent_checks_by_location_task.delay(
-                location={"site_id": self.pk},
-                mon_type="server",
-                create_tasks=True,
-            )
+        # check if polcies have changed and initiate task to reapply policies if so
+        if old_site:
+            if (
+                (old_site.server_policy != self.server_policy)
+                or (old_site.workstation_policy != self.workstation_policy)
+                or (old_site.block_policy_inheritance != self.block_policy_inheritance)
+            ):
 
-        # check if workstation polcies have changed and initiate task to reapply policies if so
-        if old_site and old_site.workstation_policy != self.workstation_policy:
-            generate_agent_checks_by_location_task.delay(
-                location={"site_id": self.pk},
-                mon_type="workstation",
-                create_tasks=True,
-            )
+                generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
+
+            if old_site.alert_template != self.alert_template:
+                cache_agents_alert_template.delay()
 
     class Meta:
         ordering = ("name",)
+        unique_together = (("client", "name"),)
 
     def __str__(self):
         return self.name
 
+    @property
+    def agent_count(self) -> int:
+        return Agent.objects.filter(site=self).count()
+
     @property
     def has_maintenanace_mode_agents(self):
         return Agent.objects.filter(site=self, maintenance_mode=True).count() > 0
@@ -174,16 +195,24 @@ class Site(BaseAuditModel):
             .prefetch_related("agentchecks")
         )
 
-        failing = 0
+        data = {"error": False, "warning": False}
+
         for agent in agents:
+
             if agent.checks["has_failing_checks"]:
-                failing += 1
+                if agent.checks["warning"]:
+                    data["warning"] = True
+
+                if agent.checks["failing"]:
+                    data["error"] = True
+                    break
 
             if agent.overdue_email_alert or agent.overdue_text_alert:
                 if agent.status == "overdue":
-                    failing += 1
+                    data["error"] = True
+                    break
 
-        return failing > 0
+        return data
 
     @staticmethod
     def serialize(site):
@@ -217,6 +246,7 @@ class Deployment(models.Model):
     )
     arch = models.CharField(max_length=255, choices=ARCH_CHOICES, default="64")
     expiry = models.DateTimeField(null=True, blank=True)
+    created = models.DateTimeField(auto_now_add=True, null=True, blank=True)
     auth_token = models.ForeignKey(
         "knox.AuthToken", related_name="deploytokens", on_delete=models.CASCADE
     )
@@ -225,3 +255,73 @@ class Deployment(models.Model):
 
     def __str__(self):
         return f"{self.client} - {self.site} - {self.mon_type}"
+
+
+class ClientCustomField(models.Model):
+    client = models.ForeignKey(
+        Client,
+        related_name="custom_fields",
+        on_delete=models.CASCADE,
+    )
+
+    field = models.ForeignKey(
+        "core.CustomField",
+        related_name="client_fields",
+        on_delete=models.CASCADE,
+    )
+
+    string_value = models.TextField(null=True, blank=True)
+    bool_value = models.BooleanField(blank=True, default=False)
+    multiple_value = ArrayField(
+        models.TextField(null=True, blank=True),
+        null=True,
+        blank=True,
+        default=list,
+    )
+
+    def __str__(self):
+        return self.field.name
+
+    @property
+    def value(self):
+        if self.field.type == "multiple":
+            return self.multiple_value
+        elif self.field.type == "checkbox":
+            return self.bool_value
+        else:
+            return self.string_value
+
+
+class SiteCustomField(models.Model):
+    site = models.ForeignKey(
+        Site,
+        related_name="custom_fields",
+        on_delete=models.CASCADE,
+    )
+
+    field = models.ForeignKey(
+        "core.CustomField",
+        related_name="site_fields",
+        on_delete=models.CASCADE,
+    )
+
+    string_value = models.TextField(null=True, blank=True)
+    bool_value = models.BooleanField(blank=True, default=False)
+    multiple_value = ArrayField(
+        models.TextField(null=True, blank=True),
+        null=True,
+        blank=True,
+        default=list,
+    )
+
+    def __str__(self):
+        return self.field.name
+
+    @property
+    def value(self):
+        if self.field.type == "multiple":
+            return self.multiple_value
+        elif self.field.type == "checkbox":
+            return self.bool_value
+        else:
+            return self.string_value
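Note: the new custom-field models store one typed column per possible field type and expose a single read-only value property that dispatches on the related core.CustomField's type. A minimal shell-style sketch of how that reads back (assumes at least one Client and one client-scoped core.CustomField already exist; names taken from the models above):

# Sketch only; run inside a Django shell for this project.
from clients.models import Client, ClientCustomField
from core.models import CustomField

client = Client.objects.first()
field = CustomField.objects.filter(model="client").first()

cf = ClientCustomField.objects.create(
    client=client, field=field, string_value="Building 7"
)

# value dispatches on field.type: "multiple" -> multiple_value,
# "checkbox" -> bool_value, anything else -> string_value
print(cf.value)
print(client.agent_count)  # new read-only property counting agents under the client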
27
api/tacticalrmm/clients/permissions.py
Normal file
27
api/tacticalrmm/clients/permissions.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
from rest_framework import permissions
|
||||||
|
|
||||||
|
from tacticalrmm.permissions import _has_perm
|
||||||
|
|
||||||
|
|
||||||
|
class ManageClientsPerms(permissions.BasePermission):
|
||||||
|
def has_permission(self, r, view):
|
||||||
|
if r.method == "GET":
|
||||||
|
return True
|
||||||
|
|
||||||
|
return _has_perm(r, "can_manage_clients")
|
||||||
|
|
||||||
|
|
||||||
|
class ManageSitesPerms(permissions.BasePermission):
|
||||||
|
def has_permission(self, r, view):
|
||||||
|
if r.method == "GET":
|
||||||
|
return True
|
||||||
|
|
||||||
|
return _has_perm(r, "can_manage_sites")
|
||||||
|
|
||||||
|
|
||||||
|
class ManageDeploymentPerms(permissions.BasePermission):
|
||||||
|
def has_permission(self, r, view):
|
||||||
|
if r.method == "GET":
|
||||||
|
return True
|
||||||
|
|
||||||
|
return _has_perm(r, "can_manage_deployments")
|
||||||
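Note: tacticalrmm.permissions._has_perm is not part of this diff, so its exact implementation is an assumption. The permission classes above only make sense if it maps a request plus a role-flag name to a boolean, roughly like this sketch:

# Assumed shape of _has_perm; the real helper may differ.
def _has_perm(request, perm: str) -> bool:
    # superusers bypass role checks (assumed behaviour)
    if request.user.is_superuser:
        return True

    role = getattr(request.user, "role", None)
    return bool(role and getattr(role, perm, False))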
@@ -1,42 +1,93 @@
 from rest_framework.serializers import ModelSerializer, ReadOnlyField, ValidationError
 
-from .models import Client, Deployment, Site
+from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
+
+
+class SiteCustomFieldSerializer(ModelSerializer):
+    class Meta:
+        model = SiteCustomField
+        fields = (
+            "id",
+            "field",
+            "site",
+            "value",
+            "string_value",
+            "bool_value",
+            "multiple_value",
+        )
+        extra_kwargs = {
+            "string_value": {"write_only": True},
+            "bool_value": {"write_only": True},
+            "multiple_value": {"write_only": True},
+        }
 
 
 class SiteSerializer(ModelSerializer):
     client_name = ReadOnlyField(source="client.name")
+    custom_fields = SiteCustomFieldSerializer(many=True, read_only=True)
+    agent_count = ReadOnlyField()
 
     class Meta:
         model = Site
-        fields = "__all__"
+        fields = (
+            "id",
+            "name",
+            "server_policy",
+            "workstation_policy",
+            "alert_template",
+            "client_name",
+            "client",
+            "custom_fields",
+            "agent_count",
+            "block_policy_inheritance",
+        )
 
     def validate(self, val):
         if "name" in val.keys() and "|" in val["name"]:
             raise ValidationError("Site name cannot contain the | character")
 
-        if self.context:
-            client = Client.objects.get(pk=self.context["clientpk"])
-            if Site.objects.filter(client=client, name=val["name"]).exists():
-                raise ValidationError(f"Site {val['name']} already exists")
-
         return val
 
 
+class ClientCustomFieldSerializer(ModelSerializer):
+    class Meta:
+        model = ClientCustomField
+        fields = (
+            "id",
+            "field",
+            "client",
+            "value",
+            "string_value",
+            "bool_value",
+            "multiple_value",
+        )
+        extra_kwargs = {
+            "string_value": {"write_only": True},
+            "bool_value": {"write_only": True},
+            "multiple_value": {"write_only": True},
+        }
+
+
 class ClientSerializer(ModelSerializer):
     sites = SiteSerializer(many=True, read_only=True)
+    custom_fields = ClientCustomFieldSerializer(many=True, read_only=True)
+    agent_count = ReadOnlyField()
 
     class Meta:
         model = Client
-        fields = "__all__"
+        fields = (
+            "id",
+            "name",
+            "server_policy",
+            "workstation_policy",
+            "alert_template",
+            "block_policy_inheritance",
+            "sites",
+            "custom_fields",
+            "agent_count",
+        )
 
     def validate(self, val):
 
-        if "site" in self.context:
-            if "|" in self.context["site"]:
-                raise ValidationError("Site name cannot contain the | character")
-            if len(self.context["site"]) > 255:
-                raise ValidationError("Site name too long")
-
         if "name" in val.keys() and "|" in val["name"]:
             raise ValidationError("Client name cannot contain the | character")
 
@@ -50,7 +101,6 @@ class SiteTreeSerializer(ModelSerializer):
     class Meta:
         model = Site
         fields = "__all__"
-        ordering = ("failing_checks",)
 
 
 class ClientTreeSerializer(ModelSerializer):
@@ -61,7 +111,6 @@ class ClientTreeSerializer(ModelSerializer):
     class Meta:
         model = Client
         fields = "__all__"
-        ordering = ("failing_checks",)
 
 
 class DeploymentSerializer(ModelSerializer):
@@ -83,4 +132,5 @@ class DeploymentSerializer(ModelSerializer):
             "arch",
             "expiry",
             "install_flags",
+            "created",
         ]
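Note: the custom-field serializers accept the raw typed columns on input but mark them write_only, so only the computed value comes back out. A small sketch of that round trip, using the same model_bakery setup style as the tests below (illustrative, not a test):

from model_bakery import baker

from clients.serializers import SiteCustomFieldSerializer

site = baker.make("clients.Site")
field = baker.make(
    "core.CustomField", model="site", type="single", options=["one", "two", "three"]
)

s = SiteCustomFieldSerializer(
    data={"site": site.id, "field": field.id, "string_value": "one"}
)
s.is_valid(raise_exception=True)
instance = s.save()

# string_value/bool_value/multiple_value are write_only, so the serialized
# output exposes only id, field, site and the read-only computed "value".
print(SiteCustomFieldSerializer(instance).data)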
@@ -1,11 +1,12 @@
 import uuid
+from unittest.mock import patch
 
 from model_bakery import baker
 from rest_framework.serializers import ValidationError
 
 from tacticalrmm.test import TacticalTestCase
 
-from .models import Client, Deployment, Site
+from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
 from .serializers import (
     ClientSerializer,
     ClientTreeSerializer,
@@ -28,18 +29,29 @@ class TestClientViews(TacticalTestCase):
         r = self.client.get(url, format="json")
         serializer = ClientSerializer(clients, many=True)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, serializer.data)
+        self.assertEqual(r.data, serializer.data)  # type: ignore
 
         self.check_not_authenticated("get", url)
 
     def test_add_client(self):
         url = "/clients/clients/"
-        payload = {"client": "Company 1", "site": "Site 1"}
+
+        # test successfull add client
+        payload = {
+            "client": {"name": "Client1"},
+            "site": {"name": "Site1"},
+            "custom_fields": [],
+        }
         r = self.client.post(url, payload, format="json")
         self.assertEqual(r.status_code, 200)
 
-        payload["client"] = "Company1|askd"
-        serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
+        # test add client with | in name
+        payload = {
+            "client": {"name": "Client2|d"},
+            "site": {"name": "Site1"},
+            "custom_fields": [],
+        }
+        serializer = ClientSerializer(data=payload["client"])
         with self.assertRaisesMessage(
             ValidationError, "Client name cannot contain the | character"
         ):
@@ -48,19 +60,22 @@ class TestClientViews(TacticalTestCase):
             r = self.client.post(url, payload, format="json")
             self.assertEqual(r.status_code, 400)
 
-        payload = {"client": "Company 156", "site": "Site2|a34"}
-        serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
-        with self.assertRaisesMessage(
-            ValidationError, "Site name cannot contain the | character"
-        ):
-            self.assertFalse(serializer.is_valid(raise_exception=True))
-
+        # test add client with | in Site name
+        payload = {
+            "client": {"name": "Client2"},
+            "site": {"name": "Site1|fds"},
+            "custom_fields": [],
+        }
         r = self.client.post(url, payload, format="json")
         self.assertEqual(r.status_code, 400)
 
         # test unique
-        payload = {"client": "Company 1", "site": "Site 1"}
-        serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
+        payload = {
+            "client": {"name": "Client1"},
+            "site": {"name": "Site1"},
+            "custom_fields": [],
+        }
+        serializer = ClientSerializer(data=payload["client"])
         with self.assertRaisesMessage(
             ValidationError, "client with this name already exists."
         ):
@@ -69,66 +84,124 @@ class TestClientViews(TacticalTestCase):
             r = self.client.post(url, payload, format="json")
             self.assertEqual(r.status_code, 400)
 
-        # test long site name
-        payload = {"client": "Company 2394", "site": "Site123" * 100}
-        serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
-        with self.assertRaisesMessage(ValidationError, "Site name too long"):
-            self.assertFalse(serializer.is_valid(raise_exception=True))
-
-        r = self.client.post(url, payload, format="json")
-        self.assertEqual(r.status_code, 400)
-
+        # test initial setup
         payload = {
-            "client": {"client": "Company 4", "site": "HQ"},
-            "initialsetup": True,
+            "client": {"name": "Setup Client"},
+            "site": {"name": "Setup Site"},
             "timezone": "America/Los_Angeles",
+            "initialsetup": True,
         }
         r = self.client.post(url, payload, format="json")
         self.assertEqual(r.status_code, 200)
 
+        # test add with custom fields
+        field = baker.make("core.CustomField", model="client", type="text")
+        payload = {
+            "client": {"name": "Custom Field Client"},
+            "site": {"name": "Setup Site"},
+            "custom_fields": [{"field": field.id, "string_value": "new Value"}],  # type: ignore
+        }
+        r = self.client.post(url, payload, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        client = Client.objects.get(name="Custom Field Client")
+        self.assertTrue(
+            ClientCustomField.objects.filter(client=client, field=field).exists()
+        )
+
         self.check_not_authenticated("post", url)
 
+    def test_get_client(self):
+        # setup data
+        client = baker.make("clients.Client")
+
+        url = f"/clients/{client.id}/client/"  # type: ignore
+        r = self.client.get(url, format="json")
+        serializer = ClientSerializer(client)
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(r.data, serializer.data)  # type: ignore
+
+        self.check_not_authenticated("get", url)
+
     def test_edit_client(self):
         # setup data
-        client = baker.make("clients.Client")
+        client = baker.make("clients.Client", name="OldClientName")
 
         # test invalid id
         r = self.client.put("/clients/500/client/", format="json")
         self.assertEqual(r.status_code, 404)
 
-        data = {"id": client.id, "name": "New Name"}
-        url = f"/clients/{client.id}/client/"
+        # test successfull edit client
+        data = {"client": {"name": "NewClientName"}, "custom_fields": []}
+        url = f"/clients/{client.id}/client/"  # type: ignore
         r = self.client.put(url, data, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(Client.objects.filter(name="New Name").exists())
+        self.assertTrue(Client.objects.filter(name="NewClientName").exists())
+        self.assertFalse(Client.objects.filter(name="OldClientName").exists())
+
+        # test edit client with | in name
+        data = {"client": {"name": "NewClie|ntName"}, "custom_fields": []}
+        url = f"/clients/{client.id}/client/"  # type: ignore
+        r = self.client.put(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+
+        # test add with custom fields new value
+        field = baker.make("core.CustomField", model="client", type="checkbox")
+        payload = {
+            "client": {
+                "id": client.id,  # type: ignore
+                "name": "Custom Field Client",
+            },
+            "custom_fields": [{"field": field.id, "bool_value": True}],  # type: ignore
+        }
+        r = self.client.put(url, payload, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        client = Client.objects.get(name="Custom Field Client")
+        self.assertTrue(
+            ClientCustomField.objects.filter(client=client, field=field).exists()
+        )
+
+        # edit custom field value
+        payload = {
+            "client": {
+                "id": client.id,  # type: ignore
+                "name": "Custom Field Client",
+            },
+            "custom_fields": [{"field": field.id, "bool_value": False}],  # type: ignore
+        }
+        r = self.client.put(url, payload, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        self.assertFalse(
+            ClientCustomField.objects.get(client=client, field=field).value
+        )
 
         self.check_not_authenticated("put", url)
 
     def test_delete_client(self):
+        from agents.models import Agent
+
         # setup data
-        client = baker.make("clients.Client")
-        site = baker.make("clients.Site", client=client)
-        agent = baker.make_recipe("agents.agent", site=site)
+        client_to_delete = baker.make("clients.Client")
+        client_to_move = baker.make("clients.Client")
+        site_to_move = baker.make("clients.Site", client=client_to_move)
+        agent = baker.make_recipe("agents.agent", site=site_to_move)
 
         # test invalid id
-        r = self.client.delete("/clients/500/client/", format="json")
+        r = self.client.delete("/clients/334/953/", format="json")
         self.assertEqual(r.status_code, 404)
 
-        url = f"/clients/{client.id}/client/"
-
-        # test deleting with agents under client
-        r = self.client.delete(url, format="json")
-        self.assertEqual(r.status_code, 400)
+        url = f"/clients/{client_to_delete.id}/{site_to_move.id}/"  # type: ignore
 
         # test successful deletion
-        agent.delete()
         r = self.client.delete(url, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertFalse(Client.objects.filter(pk=client.id).exists())
-        self.assertFalse(Site.objects.filter(pk=site.id).exists())
+        agent_moved = Agent.objects.get(pk=agent.pk)
+        self.assertEqual(agent_moved.site.id, site_to_move.id)  # type: ignore
+        self.assertFalse(Client.objects.filter(pk=client_to_delete.id).exists())  # type: ignore
 
-        self.check_not_authenticated("put", url)
+        self.check_not_authenticated("delete", url)
 
     def test_get_sites(self):
         # setup data
@@ -139,29 +212,31 @@ class TestClientViews(TacticalTestCase):
         r = self.client.get(url, format="json")
         serializer = SiteSerializer(sites, many=True)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, serializer.data)
+        self.assertEqual(r.data, serializer.data)  # type: ignore
 
         self.check_not_authenticated("get", url)
 
     def test_add_site(self):
         # setup data
-        site = baker.make("clients.Site")
+        client = baker.make("clients.Client")
+        site = baker.make("clients.Site", client=client)
 
         url = "/clients/sites/"
 
         # test success add
-        payload = {"client": site.client.id, "name": "LA Office"}
+        payload = {
+            "site": {"client": client.id, "name": "LA Office"},  # type: ignore
+            "custom_fields": [],
+        }
         r = self.client.post(url, payload, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(
-            Site.objects.filter(
-                name="LA Office", client__name=site.client.name
-            ).exists()
-        )
 
         # test with | symbol
-        payload = {"client": site.client.id, "name": "LA Off|ice |*&@#$"}
-        serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
+        payload = {
+            "site": {"client": client.id, "name": "LA Office |*&@#$"},  # type: ignore
+            "custom_fields": [],
+        }
+        serializer = SiteSerializer(data=payload["site"])
         with self.assertRaisesMessage(
             ValidationError, "Site name cannot contain the | character"
         ):
@@ -171,55 +246,135 @@ class TestClientViews(TacticalTestCase):
             self.assertEqual(r.status_code, 400)
 
         # test site already exists
-        payload = {"client": site.client.id, "name": "LA Office"}
-        serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
-        with self.assertRaisesMessage(ValidationError, "Site LA Office already exists"):
+        payload = {
+            "site": {"client": site.client.id, "name": "LA Office"},  # type: ignore
+            "custom_fields": [],
+        }
+        serializer = SiteSerializer(data=payload["site"])
+        with self.assertRaisesMessage(
+            ValidationError, "The fields client, name must make a unique set."
+        ):
             self.assertFalse(serializer.is_valid(raise_exception=True))
 
+        # test add with custom fields
+        field = baker.make(
+            "core.CustomField",
+            model="site",
+            type="single",
+            options=["one", "two", "three"],
+        )
+        payload = {
+            "site": {"client": client.id, "name": "Custom Field Site"},  # type: ignore
+            "custom_fields": [{"field": field.id, "string_value": "one"}],  # type: ignore
+        }
+        r = self.client.post(url, payload, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        site = Site.objects.get(name="Custom Field Site")
+        self.assertTrue(SiteCustomField.objects.filter(site=site, field=field).exists())
+
         self.check_not_authenticated("post", url)
 
-    def test_edit_site(self):
+    def test_get_site(self):
         # setup data
         site = baker.make("clients.Site")
 
+        url = f"/clients/sites/{site.id}/"  # type: ignore
+        r = self.client.get(url, format="json")
+        serializer = SiteSerializer(site)
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(r.data, serializer.data)  # type: ignore
+
+        self.check_not_authenticated("get", url)
+
+    def test_edit_site(self):
+        # setup data
+        client = baker.make("clients.Client")
+        site = baker.make("clients.Site", client=client)
+
         # test invalid id
-        r = self.client.put("/clients/500/site/", format="json")
+        r = self.client.put("/clients/sites/688/", format="json")
         self.assertEqual(r.status_code, 404)
 
-        data = {"id": site.id, "name": "New Name", "client": site.client.id}
+        data = {
+            "site": {"client": client.id, "name": "New Site Name"},  # type: ignore
+            "custom_fields": [],
+        }
 
-        url = f"/clients/{site.id}/site/"
+        url = f"/clients/sites/{site.id}/"  # type: ignore
         r = self.client.put(url, data, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(Site.objects.filter(name="New Name").exists())
+        self.assertTrue(
+            Site.objects.filter(client=client, name="New Site Name").exists()
+        )
+
+        # test add with custom fields new value
+        field = baker.make(
+            "core.CustomField",
+            model="site",
+            type="multiple",
+            options=["one", "two", "three"],
+        )
+        payload = {
+            "site": {
+                "id": site.id,  # type: ignore
+                "client": site.client.id,  # type: ignore
+                "name": "Custom Field Site",
+            },
+            "custom_fields": [{"field": field.id, "multiple_value": ["two", "three"]}],  # type: ignore
+        }
+        r = self.client.put(url, payload, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        site = Site.objects.get(name="Custom Field Site")
+        self.assertTrue(SiteCustomField.objects.filter(site=site, field=field).exists())
+
+        # edit custom field value
+        payload = {
+            "site": {
+                "id": site.id,  # type: ignore
+                "client": client.id,  # type: ignore
+                "name": "Custom Field Site",
+            },
+            "custom_fields": [{"field": field.id, "multiple_value": ["one"]}],  # type: ignore
+        }
+        r = self.client.put(url, payload, format="json")
+        self.assertEqual(r.status_code, 200)
+
+        self.assertTrue(
+            SiteCustomField.objects.get(site=site, field=field).value,
+            ["one"],
+        )
 
         self.check_not_authenticated("put", url)
 
     def test_delete_site(self):
+        from agents.models import Agent
+
         # setup data
-        site = baker.make("clients.Site")
-        agent = baker.make_recipe("agents.agent", site=site)
+        client = baker.make("clients.Client")
+        site_to_delete = baker.make("clients.Site", client=client)
+        site_to_move = baker.make("clients.Site")
+        agent = baker.make_recipe("agents.agent", site=site_to_delete)
 
         # test invalid id
-        r = self.client.delete("/clients/500/site/", format="json")
+        r = self.client.delete("/clients/500/445/", format="json")
         self.assertEqual(r.status_code, 404)
 
-        url = f"/clients/{site.id}/site/"
+        url = f"/clients/sites/{site_to_delete.id}/{site_to_move.id}/"  # type: ignore
 
         # test deleting with last site under client
         r = self.client.delete(url, format="json")
         self.assertEqual(r.status_code, 400)
-
-        # test deletion when agents exist under site
-        baker.make("clients.Site", client=site.client)
-        r = self.client.delete(url, format="json")
-        self.assertEqual(r.status_code, 400)
+        self.assertEqual(r.json(), "A client must have at least 1 site.")
 
         # test successful deletion
-        agent.delete()
+        site_to_move.client = client  # type: ignore
+        site_to_move.save(update_fields=["client"])  # type: ignore
         r = self.client.delete(url, format="json")
         self.assertEqual(r.status_code, 200)
-        self.assertFalse(Site.objects.filter(pk=site.id).exists())
+        agent_moved = Agent.objects.get(pk=agent.pk)
+        self.assertEqual(agent_moved.site.id, site_to_move.id)  # type: ignore
 
         self.check_not_authenticated("delete", url)
 
@@ -233,7 +388,7 @@ class TestClientViews(TacticalTestCase):
         r = self.client.get(url, format="json")
         serializer = ClientTreeSerializer(clients, many=True)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, serializer.data)
+        self.assertEqual(r.data, serializer.data)  # type: ignore
 
         self.check_not_authenticated("get", url)
 
@@ -245,7 +400,7 @@ class TestClientViews(TacticalTestCase):
         r = self.client.get(url)
         serializer = DeploymentSerializer(deployments, many=True)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data, serializer.data)
+        self.assertEqual(r.data, serializer.data)  # type: ignore
 
         self.check_not_authenticated("get", url)
 
@@ -255,8 +410,8 @@ class TestClientViews(TacticalTestCase):
 
         url = "/clients/deployments/"
         payload = {
-            "client": site.client.id,
-            "site": site.id,
+            "client": site.client.id,  # type: ignore
+            "site": site.id,  # type: ignore
             "expires": "2037-11-23 18:53",
             "power": 1,
             "ping": 0,
@@ -284,10 +439,10 @@ class TestClientViews(TacticalTestCase):
 
         url = "/clients/deployments/"
 
-        url = f"/clients/{deployment.id}/deployment/"
+        url = f"/clients/{deployment.id}/deployment/"  # type: ignore
         r = self.client.delete(url)
         self.assertEqual(r.status_code, 200)
-        self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists())
+        self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists())  # type: ignore
 
         url = "/clients/32348/deployment/"
         r = self.client.delete(url)
@@ -301,7 +456,7 @@ class TestClientViews(TacticalTestCase):
 
         r = self.client.get(url)
         self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.data, "invalid")
+        self.assertEqual(r.data, "invalid")  # type: ignore
 
         uid = uuid.uuid4()
         url = f"/clients/{uid}/deploy/"
@@ -4,10 +4,12 @@ from . import views
 
 urlpatterns = [
     path("clients/", views.GetAddClients.as_view()),
-    path("<int:pk>/client/", views.GetUpdateDeleteClient.as_view()),
+    path("<int:pk>/client/", views.GetUpdateClient.as_view()),
+    path("<int:pk>/<int:sitepk>/", views.DeleteClient.as_view()),
     path("tree/", views.GetClientTree.as_view()),
     path("sites/", views.GetAddSites.as_view()),
-    path("<int:pk>/site/", views.GetUpdateDeleteSite.as_view()),
+    path("sites/<int:pk>/", views.GetUpdateSite.as_view()),
+    path("sites/<int:pk>/<int:sitepk>/", views.DeleteSite.as_view()),
     path("deployments/", views.AgentDeployment.as_view()),
     path("<int:pk>/deployment/", views.AgentDeployment.as_view()),
     path("<str:uid>/deploy/", views.GenerateAgent.as_view()),
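Note: delete endpoints now carry the destination site for orphaned agents in the URL itself (pk of the thing being deleted, then sitepk to move agents to). A rough sketch of the call shapes, matching the paths the tests use; base URL and auth header are assumptions:

import requests

API = "https://api.example.com"                     # assumed base URL
HEADERS = {"Authorization": "Token <api token>"}    # assumed auth scheme

# delete client 5, moving its agents to site 12
requests.delete(f"{API}/clients/5/12/", headers=HEADERS)

# delete site 7, moving its agents to site 12
requests.delete(f"{API}/clients/sites/7/12/", headers=HEADERS)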
@@ -1,15 +1,13 @@
|
|||||||
import datetime as dt
|
import datetime as dt
|
||||||
import os
|
|
||||||
import re
|
import re
|
||||||
import subprocess
|
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
import pytz
|
import pytz
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.http import HttpResponse
|
|
||||||
from django.shortcuts import get_object_or_404
|
from django.shortcuts import get_object_or_404
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
from rest_framework.permissions import AllowAny
|
from loguru import logger
|
||||||
|
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
@@ -17,60 +15,124 @@ from agents.models import Agent
|
|||||||
from core.models import CoreSettings
|
from core.models import CoreSettings
|
||||||
from tacticalrmm.utils import notify_error
|
from tacticalrmm.utils import notify_error
|
||||||
|
|
||||||
from .models import Client, Deployment, Site
|
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||||
|
from .permissions import ManageClientsPerms, ManageDeploymentPerms, ManageSitesPerms
|
||||||
from .serializers import (
|
from .serializers import (
|
||||||
|
ClientCustomFieldSerializer,
|
||||||
ClientSerializer,
|
ClientSerializer,
|
||||||
ClientTreeSerializer,
|
ClientTreeSerializer,
|
||||||
DeploymentSerializer,
|
DeploymentSerializer,
|
||||||
|
SiteCustomFieldSerializer,
|
||||||
SiteSerializer,
|
SiteSerializer,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
logger.configure(**settings.LOG_CONFIG)
|
||||||
|
|
||||||
|
|
||||||
class GetAddClients(APIView):
|
class GetAddClients(APIView):
|
||||||
|
permission_classes = [IsAuthenticated, ManageClientsPerms]
|
||||||
|
|
||||||
def get(self, request):
|
def get(self, request):
|
||||||
clients = Client.objects.all()
|
clients = Client.objects.all()
|
||||||
return Response(ClientSerializer(clients, many=True).data)
|
return Response(ClientSerializer(clients, many=True).data)
|
||||||
|
|
||||||
def post(self, request):
|
def post(self, request):
|
||||||
|
# create client
|
||||||
|
client_serializer = ClientSerializer(data=request.data["client"])
|
||||||
|
client_serializer.is_valid(raise_exception=True)
|
||||||
|
client = client_serializer.save()
|
||||||
|
|
||||||
if "initialsetup" in request.data:
|
# create site
|
||||||
client = {"name": request.data["client"]["client"].strip()}
|
site_serializer = SiteSerializer(
|
||||||
site = {"name": request.data["client"]["site"].strip()}
|
data={"client": client.id, "name": request.data["site"]["name"]}
|
||||||
serializer = ClientSerializer(data=client, context=request.data["client"])
|
)
|
||||||
serializer.is_valid(raise_exception=True)
|
|
||||||
|
# make sure site serializer doesn't return errors and save
|
||||||
|
if site_serializer.is_valid():
|
||||||
|
site_serializer.save()
|
||||||
|
else:
|
||||||
|
# delete client since site serializer was invalid
|
||||||
|
client.delete()
|
||||||
|
site_serializer.is_valid(raise_exception=True)
|
||||||
|
|
||||||
|
if "initialsetup" in request.data.keys():
|
||||||
core = CoreSettings.objects.first()
|
core = CoreSettings.objects.first()
|
||||||
core.default_time_zone = request.data["timezone"]
|
core.default_time_zone = request.data["timezone"]
|
||||||
core.save(update_fields=["default_time_zone"])
|
core.save(update_fields=["default_time_zone"])
|
||||||
else:
|
|
||||||
client = {"name": request.data["client"].strip()}
|
|
||||||
site = {"name": request.data["site"].strip()}
|
|
||||||
serializer = ClientSerializer(data=client, context=request.data)
|
|
||||||
serializer.is_valid(raise_exception=True)
|
|
||||||
|
|
||||||
obj = serializer.save()
|
# save custom fields
|
||||||
Site(client=obj, name=site["name"]).save()
|
if "custom_fields" in request.data.keys():
|
||||||
|
for field in request.data["custom_fields"]:
|
||||||
|
|
||||||
return Response(f"{obj} was added!")
|
custom_field = field
|
||||||
|
custom_field["client"] = client.id
|
||||||
|
|
||||||
|
serializer = ClientCustomFieldSerializer(data=custom_field)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
|
||||||
|
return Response(f"{client} was added!")
|
||||||
|
|
||||||
|
|
||||||
class GetUpdateDeleteClient(APIView):
|
class GetUpdateClient(APIView):
|
||||||
|
permission_classes = [IsAuthenticated, ManageClientsPerms]
|
||||||
|
|
||||||
|
def get(self, request, pk):
|
||||||
|
client = get_object_or_404(Client, pk=pk)
|
||||||
|
return Response(ClientSerializer(client).data)
|
||||||
|
|
||||||
def put(self, request, pk):
|
def put(self, request, pk):
|
||||||
client = get_object_or_404(Client, pk=pk)
|
client = get_object_or_404(Client, pk=pk)
|
||||||
|
|
||||||
serializer = ClientSerializer(data=request.data, instance=client, partial=True)
|
serializer = ClientSerializer(
|
||||||
|
data=request.data["client"], instance=client, partial=True
|
||||||
|
)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
|
|
||||||
return Response("The Client was renamed")
|
# update custom fields
|
||||||
|
if "custom_fields" in request.data.keys():
|
||||||
|
for field in request.data["custom_fields"]:
|
||||||
|
|
||||||
|
custom_field = field
|
||||||
|
custom_field["client"] = pk
|
||||||
|
|
||||||
|
if ClientCustomField.objects.filter(field=field["field"], client=pk):
|
||||||
|
value = ClientCustomField.objects.get(
|
||||||
|
field=field["field"], client=pk
|
||||||
|
)
|
||||||
|
serializer = ClientCustomFieldSerializer(
|
||||||
|
instance=value, data=custom_field
|
||||||
|
)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
else:
|
||||||
|
serializer = ClientCustomFieldSerializer(data=custom_field)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
|
||||||
|
return Response("The Client was updated")
|
||||||
|
|
||||||
|
|
||||||
|
class DeleteClient(APIView):
|
||||||
|
permission_classes = [IsAuthenticated, ManageClientsPerms]
|
||||||
|
|
||||||
|
def delete(self, request, pk, sitepk):
|
||||||
|
from automation.tasks import generate_agent_checks_task
|
||||||
|
|
||||||
def delete(self, request, pk):
|
|
||||||
client = get_object_or_404(Client, pk=pk)
|
client = get_object_or_404(Client, pk=pk)
|
||||||
agent_count = Agent.objects.filter(site__client=client).count()
|
agents = Agent.objects.filter(site__client=client)
|
||||||
if agent_count > 0:
|
|
||||||
|
if not sitepk:
|
||||||
return notify_error(
|
return notify_error(
|
||||||
f"Cannot delete {client} while {agent_count} agents exist in it. Move the agents to another client first."
|
"There needs to be a site specified to move existing agents to"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
site = get_object_or_404(Site, pk=sitepk)
|
||||||
|
agents.update(site=site)
|
||||||
|
|
||||||
|
generate_agent_checks_task.delay(all=True, create_tasks=True)
|
||||||
|
|
||||||
client.delete()
|
client.delete()
|
||||||
return Response(f"{client.name} was deleted!")
|
return Response(f"{client.name} was deleted!")
|
||||||
|
|
||||||
@@ -82,49 +144,107 @@ class GetClientTree(APIView):
|
|||||||
|
|
||||||
|
|
||||||
class GetAddSites(APIView):
|
class GetAddSites(APIView):
|
||||||
|
permission_classes = [IsAuthenticated, ManageSitesPerms]
|
||||||
|
|
||||||
def get(self, request):
|
def get(self, request):
|
||||||
sites = Site.objects.all()
|
sites = Site.objects.all()
|
||||||
return Response(SiteSerializer(sites, many=True).data)
|
return Response(SiteSerializer(sites, many=True).data)
|
||||||
|
|
||||||
def post(self, request):
|
def post(self, request):
|
||||||
name = request.data["name"].strip()
|
serializer = SiteSerializer(data=request.data["site"])
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
site = serializer.save()
|
||||||
|
|
||||||
|
# save custom fields
|
||||||
|
if "custom_fields" in request.data.keys():
|
||||||
|
|
||||||
|
for field in request.data["custom_fields"]:
|
||||||
|
|
||||||
|
custom_field = field
|
||||||
|
custom_field["site"] = site.id
|
||||||
|
|
||||||
|
serializer = SiteCustomFieldSerializer(data=custom_field)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
|
||||||
|
return Response(f"Site {site.name} was added!")
|
||||||
|
|
||||||
|
|
||||||
|
class GetUpdateSite(APIView):
|
||||||
|
permission_classes = [IsAuthenticated, ManageSitesPerms]
|
||||||
|
|
||||||
|
def get(self, request, pk):
|
||||||
|
site = get_object_or_404(Site, pk=pk)
|
||||||
|
return Response(SiteSerializer(site).data)
|
||||||
|
|
||||||
|
def put(self, request, pk):
|
||||||
|
site = get_object_or_404(Site, pk=pk)
|
||||||
|
|
||||||
|
if "client" in request.data["site"].keys() and (
|
||||||
|
site.client.id != request.data["site"]["client"]
|
||||||
|
and site.client.sites.count() == 1
|
||||||
|
):
|
||||||
|
return notify_error("A client must have at least one site")
|
||||||
|
|
||||||
serializer = SiteSerializer(
|
serializer = SiteSerializer(
|
||||||
data={"name": name, "client": request.data["client"]},
|
instance=site, data=request.data["site"], partial=True
|
||||||
context={"clientpk": request.data["client"]},
|
|
||||||
)
|
)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
|
|
||||||
return Response("ok")
|
# update custom field
|
||||||
|
if "custom_fields" in request.data.keys():
|
||||||
|
|
||||||
|
for field in request.data["custom_fields"]:
|
||||||
|
|
||||||
|
custom_field = field
|
||||||
|
custom_field["site"] = pk
|
||||||
|
|
||||||
|
if SiteCustomField.objects.filter(field=field["field"], site=pk):
|
||||||
|
value = SiteCustomField.objects.get(field=field["field"], site=pk)
|
||||||
|
serializer = SiteCustomFieldSerializer(
|
||||||
|
instance=value, data=custom_field, partial=True
|
||||||
|
)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
else:
|
||||||
|
serializer = SiteCustomFieldSerializer(data=custom_field)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
|
||||||
|
return Response("Site was edited!")
|
||||||
|
|
||||||
|
|

-class GetUpdateDeleteSite(APIView):
-    def put(self, request, pk):
-        site = get_object_or_404(Site, pk=pk)
-        serializer = SiteSerializer(instance=site, data=request.data, partial=True)
-        serializer.is_valid(raise_exception=True)
-        serializer.save()
-
-        return Response("ok")
-
-    def delete(self, request, pk):
+class DeleteSite(APIView):
+    permission_classes = [IsAuthenticated, ManageSitesPerms]
+
+    def delete(self, request, pk, sitepk):
+        from automation.tasks import generate_agent_checks_task
+
         site = get_object_or_404(Site, pk=pk)
         if site.client.sites.count() == 1:
-            return notify_error(f"A client must have at least 1 site.")
+            return notify_error("A client must have at least 1 site.")

-        agent_count = Agent.objects.filter(site=site).count()
+        agents = Agent.objects.filter(site=site)

-        if agent_count > 0:
+        if not sitepk:
             return notify_error(
-                f"Cannot delete {site.name} while {agent_count} agents exist in it. Move the agents to another site first."
+                "There needs to be a site specified to move the agents to"
             )

+        agent_site = get_object_or_404(Site, pk=sitepk)
+
+        agents.update(site=agent_site)
+
+        generate_agent_checks_task.delay(all=True, create_tasks=True)
+
         site.delete()
         return Response(f"{site.name} was deleted!")
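The rewritten delete handler now requires a destination site (sitepk), reassigns all agents to it before removing the site row, and then queues generate_agent_checks_task to rebuild policy checks. A minimal sketch of invoking it with DRF's test client; the two-pk path shape is inferred from the delete(self, request, pk, sitepk) signature and is a placeholder, not taken from this diff:

# Sketch only: placeholder URL; agents in site `pk` are moved to `dest_sitepk` before deletion.
from django.contrib.auth import get_user_model
from rest_framework.test import APIClient

def delete_site_example(pk: int, dest_sitepk: int) -> None:
    api = APIClient()
    api.force_authenticate(user=get_user_model().objects.first())  # assumes a user exists
    resp = api.delete(f"/clients/sites/{pk}/{dest_sitepk}/")  # placeholder path
    print(resp.status_code, resp.data)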


 class AgentDeployment(APIView):
+    permission_classes = [IsAuthenticated, ManageDeploymentPerms]
+
     def get(self, request):
         deps = Deployment.objects.all()
         return Response(DeploymentSerializer(deps, many=True).data)
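ManageDeploymentPerms (like ManageSitesPerms above) is only referenced in this hunk, not defined. As a rough illustration, a DRF permission class of this kind usually gates on a role flag of the requesting user; the attribute names below are hypothetical and not taken from this diff:

# Hypothetical sketch of a permission class in the style of ManageDeploymentPerms.
from rest_framework import permissions

class ManageDeploymentPermsSketch(permissions.BasePermission):
    def has_permission(self, request, view) -> bool:
        if request.user.is_superuser:
            return True
        role = getattr(request.user, "role", None)  # assumed role relation
        return bool(role and getattr(role, "can_manage_deployments", False))  # assumed flag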

@@ -176,6 +296,8 @@ class GenerateAgent(APIView):
     permission_classes = (AllowAny,)

     def get(self, request, uid):
+        from tacticalrmm.utils import generate_winagent_exe
+
         try:
             _ = uuid.UUID(uid, version=4)
         except ValueError:
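Because GenerateAgent allows unauthenticated access, the uuid.UUID(uid, version=4) guard is what keeps malformed deployment links from ever reaching the Deployment lookup. The same validation pattern in isolation:

# Standalone illustration of the uid guard used in GenerateAgent.get.
import uuid

def is_valid_deployment_uid(uid: str) -> bool:
    try:
        uuid.UUID(uid, version=4)
    except ValueError:
        return False
    return True

print(is_valid_deployment_uid("f4fca201-4cb9-4b0c-8c7e-7f0a6f9a2e3b"))  # True
print(is_valid_deployment_uid("not-a-uuid"))                            # False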

@@ -183,99 +305,22 @@ class GenerateAgent(APIView):

         d = get_object_or_404(Deployment, uid=uid)

-        go_bin = "/usr/local/rmmgo/go/bin/go"
-
-        if not os.path.exists(go_bin):
-            return notify_error("Missing golang")
-
-        api = f"https://{request.get_host()}"
-        inno = (
-            f"winagent-v{settings.LATEST_AGENT_VER}.exe"
-            if d.arch == "64"
-            else f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
-        )
-        download_url = settings.DL_64 if d.arch == "64" else settings.DL_32
-
         client = d.client.name.replace(" ", "").lower()
         site = d.site.name.replace(" ", "").lower()
         client = re.sub(r"([^a-zA-Z0-9]+)", "", client)
         site = re.sub(r"([^a-zA-Z0-9]+)", "", site)

         ext = ".exe" if d.arch == "64" else "-x86.exe"

         file_name = f"rmm-{client}-{site}-{d.mon_type}{ext}"
-        exe = os.path.join(settings.EXE_DIR, file_name)
-
-        if os.path.exists(exe):
-            try:
-                os.remove(exe)
-            except:
-                pass
-
-        goarch = "amd64" if d.arch == "64" else "386"
-        cmd = [
-            "env",
-            "GOOS=windows",
-            f"GOARCH={goarch}",
-            go_bin,
-            "build",
-            f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
-            f"-X 'main.Api={api}'",
-            f"-X 'main.Client={d.client.pk}'",
-            f"-X 'main.Site={d.site.pk}'",
-            f"-X 'main.Atype={d.mon_type}'",
-            f"-X 'main.Rdp={d.install_flags['rdp']}'",
-            f"-X 'main.Ping={d.install_flags['ping']}'",
-            f"-X 'main.Power={d.install_flags['power']}'",
-            f"-X 'main.DownloadUrl={download_url}'",
-            f"-X 'main.Token={d.token_key}'\"",
-            "-o",
-            exe,
-        ]
-
-        gen = [
-            "env",
-            "GOOS=windows",
-            f"GOARCH={goarch}",
-            go_bin,
-            "generate",
-        ]
-        try:
-            r1 = subprocess.run(
-                " ".join(gen),
-                capture_output=True,
-                shell=True,
-                cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
-            )
-        except:
-            return notify_error("genfailed")
-
-        if r1.returncode != 0:
-            return notify_error("genfailed")
-
-        try:
-            r = subprocess.run(
-                " ".join(cmd),
-                capture_output=True,
-                shell=True,
-                cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
-            )
-        except:
-            return notify_error("buildfailed")
-
-        if r.returncode != 0:
-            return notify_error("buildfailed")
-
-        if settings.DEBUG:
-            with open(exe, "rb") as f:
-                response = HttpResponse(
-                    f.read(),
-                    content_type="application/vnd.microsoft.portable-executable",
-                )
-                response["Content-Disposition"] = f"inline; filename={file_name}"
-                return response
-        else:
-            response = HttpResponse()
-            response["Content-Disposition"] = f"attachment; filename={file_name}"
-            response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
-            return response
+
+        return generate_winagent_exe(
+            client=d.client.pk,
+            site=d.site.pk,
+            agent_type=d.mon_type,
+            rdp=d.install_flags["rdp"],
+            ping=d.install_flags["ping"],
+            power=d.install_flags["power"],
+            arch=d.arch,
+            token=d.token_key,
+            api=f"https://{request.get_host()}",
+            file_name=file_name,
+        )
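The inline go build has moved into a generate_winagent_exe helper in tacticalrmm/utils; its implementation is not part of this hunk. Below is a rough, condensed sketch of what such a helper would have to cover, reconstructed only from the code removed above (paths, ldflags names, and error handling are carried over from the old inline version, and the go generate pre-step plus the DEBUG file-serving branch are omitted); the real helper may differ:

# Sketch of a helper in the spirit of generate_winagent_exe, not the actual implementation.
import os
import subprocess

from django.conf import settings
from django.http import HttpResponse

def build_winagent_exe(client, site, agent_type, rdp, ping, power, arch, token, api, file_name):
    go_bin = "/usr/local/rmmgo/go/bin/go"
    if not os.path.exists(go_bin):
        return None  # the old view returned notify_error("Missing golang")

    goarch = "amd64" if arch == "64" else "386"
    inno = (
        f"winagent-v{settings.LATEST_AGENT_VER}.exe"
        if arch == "64"
        else f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
    )
    download_url = settings.DL_64 if arch == "64" else settings.DL_32
    exe = os.path.join(settings.EXE_DIR, file_name)

    # embed the deployment parameters via -ldflags -X, exactly as the removed code did
    cmd = [
        "env", "GOOS=windows", f"GOARCH={goarch}", go_bin, "build",
        f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
        f"-X 'main.Api={api}'",
        f"-X 'main.Client={client}'",
        f"-X 'main.Site={site}'",
        f"-X 'main.Atype={agent_type}'",
        f"-X 'main.Rdp={rdp}'",
        f"-X 'main.Ping={ping}'",
        f"-X 'main.Power={power}'",
        f"-X 'main.DownloadUrl={download_url}'",
        f"-X 'main.Token={token}'\"",
        "-o", exe,
    ]
    r = subprocess.run(
        " ".join(cmd),
        capture_output=True,
        shell=True,
        cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
    )
    if r.returncode != 0:
        return None  # the old view returned notify_error("buildfailed")

    # hand the built exe off to nginx via X-Accel-Redirect, as the removed code did in production
    response = HttpResponse()
    response["Content-Disposition"] = f"attachment; filename={file_name}"
    response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
    return response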
Some files were not shown because too many files have changed in this diff.