Compare commits
1087 Commits
| Author | SHA1 | Date |
|---|---|---|

[Commit table listing the 1087 compared commits, 3588bf827b through 4234cf0a31; only the SHA1 column is populated.]
@@ -26,3 +26,6 @@ POSTGRES_PASS=postgrespass
APP_PORT=80
API_PORT=80
HTTP_PROTOCOL=https
DOCKER_NETWORK=172.21.0.0/24
DOCKER_NGINX_IP=172.21.0.20
NATS_PORTS=4222:4222
@@ -1,4 +1,4 @@
FROM python:3.9.2-slim
FROM python:3.9.9-slim

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready

@@ -13,12 +13,13 @@ EXPOSE 8000 8383 8005
RUN groupadd -g 1000 tactical && \
useradd -u 1000 -g 1000 tactical

# Copy Dev python reqs
COPY ./requirements.txt /
# Copy dev python reqs
COPY .devcontainer/requirements.txt /

# Copy Docker Entrypoint
COPY ./entrypoint.sh /
# Copy docker entrypoint.sh
COPY .devcontainer/entrypoint.sh /
RUN chmod +x /entrypoint.sh

ENTRYPOINT ["/entrypoint.sh"]

WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
@@ -6,9 +6,9 @@ services:
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-api"]
context: ..
dockerfile: .devcontainer/api.dockerfile
command: [ "tactical-api" ]
environment:
API_PORT: ${API_PORT}
ports:

@@ -18,14 +18,15 @@ services:
- ..:/workspace:cached
networks:
dev:
aliases:
aliases:
- tactical-backend

app-dev:
container_name: trmm-app-dev
image: node:14-alpine
restart: always
command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve
-- --host 0.0.0.0 --port ${APP_PORT}"
working_dir: /workspace/web
volumes:
- ..:/workspace:cached

@@ -33,7 +34,7 @@ services:
- "8080:${APP_PORT}"
networks:
dev:
aliases:
aliases:
- tactical-frontend

# nats

@@ -46,7 +47,7 @@ services:
API_PORT: ${API_PORT}
DEV: 1
ports:
- "4222:4222"
- "${NATS_PORTS}"
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached

@@ -61,13 +62,13 @@ services:
container_name: trmm-meshcentral-dev
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
restart: always
environment:
environment:
MESH_HOST: ${MESH_HOST}
MESH_USER: ${MESH_USER}
MESH_PASS: ${MESH_PASS}
MONGODB_USER: ${MONGODB_USER}
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
NGINX_HOST_IP: 172.21.0.20
NGINX_HOST_IP: ${DOCKER_NGINX_IP}
networks:
dev:
aliases:

@@ -115,7 +116,10 @@ services:
redis-dev:
container_name: trmm-redis-dev
restart: always
command: redis-server --appendonly yes
image: redis:6.0-alpine
volumes:
- redis-data-dev:/data
networks:
dev:
aliases:

@@ -124,11 +128,8 @@ services:
init-dev:
container_name: trmm-init-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
restart: on-failure
command: ["tactical-init-dev"]
command: [ "tactical-init-dev" ]
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASS: ${POSTGRES_PASS}

@@ -153,10 +154,7 @@ services:
celery-dev:
container_name: trmm-celery-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celery-dev"]
command: [ "tactical-celery-dev" ]
restart: always
networks:
- dev

@@ -171,10 +169,7 @@ services:
celerybeat-dev:
container_name: trmm-celerybeat-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celerybeat-dev"]
command: [ "tactical-celerybeat-dev" ]
restart: always
networks:
- dev

@@ -189,10 +184,7 @@ services:
websockets-dev:
container_name: trmm-websockets-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-websockets-dev"]
command: [ "tactical-websockets-dev" ]
restart: always
networks:
dev:

@@ -218,9 +210,10 @@ services:
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
APP_PORT: ${APP_PORT}
API_PORT: ${API_PORT}
DEV: 1
networks:
dev:
ipv4_address: 172.21.0.20
ipv4_address: ${DOCKER_NGINX_IP}
ports:
- "80:80"
- "443:443"

@@ -231,10 +224,7 @@ services:
container_name: trmm-mkdocs-dev
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-mkdocs-dev"]
command: [ "tactical-mkdocs-dev" ]
ports:
- "8005:8005"
volumes:

@@ -243,10 +233,11 @@ services:
- dev

volumes:
tactical-data-dev:
postgres-data-dev:
mongo-dev-data:
mesh-data-dev:
tactical-data-dev: null
postgres-data-dev: null
mongo-dev-data: null
mesh-data-dev: null
redis-data-dev: null

networks:
dev:

@@ -254,4 +245,4 @@ networks:
ipam:
driver: default
config:
- subnet: 172.21.0.0/24
- subnet: ${DOCKER_NETWORK}
@@ -9,7 +9,8 @@ set -e
: "${POSTGRES_USER:=tactical}"
: "${POSTGRES_PASS:=tactical}"
: "${POSTGRES_DB:=tacticalrmm}"
: "${MESH_CONTAINER:=tactical-meshcentral}"
: "${MESH_SERVICE:=tactical-meshcentral}"
: "${MESH_WS_URL:=ws://${MESH_SERVICE}:443}"
: "${MESH_USER:=meshcentral}"
: "${MESH_PASS:=meshcentralpass}"
: "${MESH_HOST:=tactical-meshcentral}"

@@ -20,6 +21,9 @@ set -e
: "${APP_PORT:=8080}"
: "${API_PORT:=8000}"

: "${CERT_PRIV_PATH:=${TACTICAL_DIR}/certs/privkey.pem}"
: "${CERT_PUB_PATH:=${TACTICAL_DIR}/certs/fullchain.pem}"

# Add python venv to path
export PATH="${VIRTUAL_ENV}/bin:$PATH"

@@ -37,7 +41,7 @@ function django_setup {
sleep 5
done

until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
until (echo > /dev/tcp/"${MESH_SERVICE}"/443) &> /dev/null; do
echo "waiting for meshcentral container to be ready..."
sleep 5
done

@@ -56,8 +60,8 @@ DEBUG = True

DOCKER_BUILD = True

CERT_FILE = '/opt/tactical/certs/fullchain.pem'
KEY_FILE = '/opt/tactical/certs/privkey.pem'
CERT_FILE = '${CERT_PUB_PATH}'
KEY_FILE = '${CERT_PRIV_PATH}'

SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'

@@ -78,28 +82,11 @@ DATABASES = {
}
}

REST_FRAMEWORK = {
    'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',

    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'knox.auth.TokenAuthentication',
    ),
}

if not DEBUG:
    REST_FRAMEWORK.update({
        'DEFAULT_RENDERER_CLASSES': (
            'rest_framework.renderers.JSONRenderer',
        )
    })

MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
REDIS_HOST = '${REDIS_HOST}'
MESH_WS_URL = '${MESH_WS_URL}'
ADMIN_ENABLED = True
EOF
)"

@@ -114,6 +101,10 @@ EOF
"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
"${VIRTUAL_ENV}"/bin/python manage.py create_natsapi_conf
"${VIRTUAL_ENV}"/bin/python manage.py create_installer_user
"${VIRTUAL_ENV}"/bin/python manage.py post_update_tasks

# create super user
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
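The superuser one-liner piped into manage.py shell at the end of the hunk above is easier to follow unrolled. A minimal Python sketch of the same idempotent check-then-create, with hypothetical values standing in for ${TRMM_USER} and ${TRMM_PASS}, is:

```python
# Unrolled equivalent of the superuser one-liner above.
# Assumption: run via `python manage.py shell` inside this project;
# "tactical" / "changeme" are placeholders for ${TRMM_USER} / ${TRMM_PASS}.
from accounts.models import User

if not User.objects.filter(username="tactical").exists():
    User.objects.create_superuser("tactical", "admin@example.com", "changeme")
```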
@@ -2,7 +2,9 @@
asyncio-nats-client
celery
channels
Django
channels_redis
django-ipware
Django==3.2.10
django-cors-headers
django-rest-knox
djangorestframework

@@ -33,3 +35,5 @@ Pygments
mypy
pysnooper
isort
drf_spectacular
pandas
.github/workflows/codeql-analysis.yml (new file, 70 lines)

@@ -0,0 +1,70 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ develop ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ develop ]
  schedule:
    - cron: '19 14 * * 6'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go', 'javascript', 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      # and modify them (or add more) to build your code if your project
      # uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
.github/workflows/devskim-analysis.yml (new file, 34 lines)

@@ -0,0 +1,34 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: DevSkim

on:
  push:
    branches: [ develop ]
  pull_request:
    branches: [ develop ]
  schedule:
    - cron: '19 5 * * 0'

jobs:
  lint:
    name: DevSkim
    runs-on: ubuntu-20.04
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Run DevSkim scanner
        uses: microsoft/DevSkim-Action@v1

      - name: Upload DevSkim scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v1
        with:
          sarif_file: devskim-results.sarif
.gitignore (4 lines added)

@@ -47,3 +47,7 @@ docs/.vuepress/dist
nats-rmm.conf
.mypy_cache
docs/site/
reset_db.sh
run_go_cmd.py
nats-api.conf
@@ -9,7 +9,7 @@ Tactical RMM is a remote monitoring & management tool for Windows computers, bui
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)

# [LIVE DEMO](https://rmm.tacticalrmm.io/)
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.

### [Discord Chat](https://discord.gg/upGTkWp)

@@ -35,4 +35,4 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso

## Installation / Backup / Restore / Usage

### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
SECURITY.md (new file, 19 lines)

@@ -0,0 +1,19 @@
# Security Policy

## Supported Versions

Use this section to tell people about which versions of your project are
currently being supported with security updates.

| Version | Supported |
| ------- | ------------------ |
| 0.10.4 | :white_check_mark: |
| < 0.10.4| :x: |

## Reporting a Vulnerability

Use this section to tell people how to report a vulnerability.

Tell them where to go, how often they can expect to get an update on a
reported vulnerability, what to expect if the vulnerability is accepted or
declined, etc.
@@ -21,4 +21,6 @@ omit =
*/tests.py
*/test.py
checks/utils.py
*/asgi.py
*/demo_views.py
@@ -1,7 +1,8 @@
from django.contrib import admin
from rest_framework.authtoken.admin import TokenAdmin

from .models import User
from .models import User, Role

admin.site.register(User)
TokenAdmin.raw_id_fields = ("user",)
admin.site.register(Role)
@@ -0,0 +1,19 @@
import uuid

from django.core.management.base import BaseCommand
from accounts.models import User


class Command(BaseCommand):
    help = "Creates the installer user"

    def handle(self, *args, **kwargs):
        if User.objects.filter(is_installer_user=True).exists():
            return

        User.objects.create_user(  # type: ignore
            username=uuid.uuid4().hex,
            is_installer_user=True,
            password=User.objects.make_random_password(60),  # type: ignore
            block_dashboard_login=True,
        )
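The management command above is what the updated entrypoint.sh calls as create_installer_user during init. A minimal sketch of exercising it programmatically, assuming this project's accounts app, might look like:

```python
# Minimal sketch; assumes it runs inside this Django project (shell or test case).
from django.core.management import call_command
from accounts.models import User

call_command("create_installer_user")  # a second run is a no-op: the command returns early

installer = User.objects.get(is_installer_user=True)
print(installer.username)               # random 32-char hex username
print(installer.block_dashboard_login)  # True: the account cannot log into the dashboard
```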
@@ -0,0 +1,25 @@
# Generated by Django 3.2.1 on 2021-05-07 15:26

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0022_urlaction'),
        ('accounts', '0015_user_loading_bar_color'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='url_action',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='user', to='core.urlaction'),
        ),
        migrations.AlterField(
            model_name='user',
            name='agent_dblclick_action',
            field=models.CharField(choices=[('editagent', 'Edit Agent'), ('takecontrol', 'Take Control'), ('remotebg', 'Remote Background'), ('urlaction', 'URL Action')], default='editagent', max_length=50),
        ),
    ]
api/tacticalrmm/accounts/migrations/0017_auto_20210508_1716.py (new file, 173 lines)

@@ -0,0 +1,173 @@
# Generated by Django 3.2.1 on 2021-05-08 17:16

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0016_auto_20210507_1526'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='can_code_sign',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_do_server_maint',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_edit_agent',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_edit_core_settings',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_install_agents',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_accounts',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_alerts',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_automation_policies',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_autotasks',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_checks',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_clients',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_deployments',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_notes',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_pendingactions',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_procs',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_scripts',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_sites',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_software',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_winsvcs',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_manage_winupdates',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_reboot_agents',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_run_autotasks',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_run_bulk',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_run_checks',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_run_scripts',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_send_cmd',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_uninstall_agents',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_update_agents',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_use_mesh',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_view_auditlogs',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_view_debuglogs',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='user',
            name='can_view_eventlogs',
            field=models.BooleanField(default=False),
        ),
    ]
api/tacticalrmm/accounts/migrations/0018_auto_20210511_0233.py (new file, 181 lines)

@@ -0,0 +1,181 @@
# Generated by Django 3.2.1 on 2021-05-11 02:33

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0017_auto_20210508_1716'),
    ]

    operations = [
        migrations.CreateModel(
            name='Role',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('is_superuser', models.BooleanField(default=False)),
                ('can_use_mesh', models.BooleanField(default=False)),
                ('can_uninstall_agents', models.BooleanField(default=False)),
                ('can_update_agents', models.BooleanField(default=False)),
                ('can_edit_agent', models.BooleanField(default=False)),
                ('can_manage_procs', models.BooleanField(default=False)),
                ('can_view_eventlogs', models.BooleanField(default=False)),
                ('can_send_cmd', models.BooleanField(default=False)),
                ('can_reboot_agents', models.BooleanField(default=False)),
                ('can_install_agents', models.BooleanField(default=False)),
                ('can_run_scripts', models.BooleanField(default=False)),
                ('can_run_bulk', models.BooleanField(default=False)),
                ('can_manage_notes', models.BooleanField(default=False)),
                ('can_edit_core_settings', models.BooleanField(default=False)),
                ('can_do_server_maint', models.BooleanField(default=False)),
                ('can_code_sign', models.BooleanField(default=False)),
                ('can_manage_checks', models.BooleanField(default=False)),
                ('can_run_checks', models.BooleanField(default=False)),
                ('can_manage_clients', models.BooleanField(default=False)),
                ('can_manage_sites', models.BooleanField(default=False)),
                ('can_manage_deployments', models.BooleanField(default=False)),
                ('can_manage_automation_policies', models.BooleanField(default=False)),
                ('can_manage_autotasks', models.BooleanField(default=False)),
                ('can_run_autotasks', models.BooleanField(default=False)),
                ('can_view_auditlogs', models.BooleanField(default=False)),
                ('can_manage_pendingactions', models.BooleanField(default=False)),
                ('can_view_debuglogs', models.BooleanField(default=False)),
                ('can_manage_scripts', models.BooleanField(default=False)),
                ('can_manage_alerts', models.BooleanField(default=False)),
                ('can_manage_winsvcs', models.BooleanField(default=False)),
                ('can_manage_software', models.BooleanField(default=False)),
                ('can_manage_winupdates', models.BooleanField(default=False)),
                ('can_manage_accounts', models.BooleanField(default=False)),
            ],
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_code_sign',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_do_server_maint',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_edit_agent',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_edit_core_settings',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_install_agents',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_accounts',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_alerts',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_automation_policies',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_autotasks',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_checks',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_clients',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_deployments',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_notes',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_pendingactions',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_procs',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_scripts',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_sites',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_software',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_winsvcs',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_manage_winupdates',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_reboot_agents',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_run_autotasks',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_run_bulk',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_run_checks',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_run_scripts',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_send_cmd',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_uninstall_agents',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_update_agents',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_use_mesh',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_view_auditlogs',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_view_debuglogs',
        ),
        migrations.RemoveField(
            model_name='user',
            name='can_view_eventlogs',
        ),
    ]
api/tacticalrmm/accounts/migrations/0019_user_role.py (new file, 25 lines)

@@ -0,0 +1,25 @@
# Generated by Django 3.2.1 on 2021-05-11 02:33

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("accounts", "0018_auto_20210511_0233"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="role",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="roles",
                to="accounts.role",
            ),
        ),
    ]
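Migrations 0017–0019 above replace the per-user permission booleans with a Role model that users reference through a nullable role foreign key. A minimal sketch of how the resulting models might be used, assuming this project's accounts app and a hypothetical existing username, is:

```python
# Minimal sketch; assumes this Django project (e.g. via manage.py shell).
from accounts.models import Role, User

# A role that only grants script execution; every other flag defaults to False.
scripts_only = Role.objects.create(name="Script Runners", can_run_scripts=True)

tech = User.objects.get(username="tech1")  # hypothetical existing user
tech.role = scripts_only                   # nullable FK added in 0019_user_role
tech.save(update_fields=["role"])

# Permission checks now read from the role instead of per-user booleans.
print(tech.role.can_run_scripts)    # True
print(tech.role.can_manage_checks)  # False
```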
@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-05-11 17:37

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0019_user_role'),
    ]

    operations = [
        migrations.AddField(
            model_name='role',
            name='can_manage_roles',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.4 on 2021-06-17 04:29

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0020_role_can_manage_roles'),
    ]

    operations = [
        migrations.AddField(
            model_name='role',
            name='can_view_core_settings',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.4 on 2021-06-28 05:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0021_role_can_view_core_settings'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='clear_search_when_switching',
            field=models.BooleanField(default=True),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.4 on 2021-06-30 03:22

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0022_user_clear_search_when_switching'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='is_installer_user',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-07-20 20:26

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0023_user_is_installer_user'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='last_login_ip',
            field=models.GenericIPAddressField(blank=True, default=None, null=True),
        ),
    ]
@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-21 04:24

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0024_user_last_login_ip'),
    ]

    operations = [
        migrations.AddField(
            model_name='role',
            name='created_by',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='role',
            name='created_time',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='role',
            name='modified_by',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='role',
            name='modified_time',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
    ]
@@ -0,0 +1,34 @@
# Generated by Django 3.2.6 on 2021-09-01 12:47

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0025_auto_20210721_0424'),
    ]

    operations = [
        migrations.CreateModel(
            name='APIKey',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_by', models.CharField(blank=True, max_length=100, null=True)),
                ('created_time', models.DateTimeField(auto_now_add=True, null=True)),
                ('modified_by', models.CharField(blank=True, max_length=100, null=True)),
                ('modified_time', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(max_length=25, unique=True)),
                ('key', models.CharField(blank=True, max_length=48, unique=True)),
                ('expiration', models.DateTimeField(blank=True, default=None, null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='role',
            name='can_manage_api_keys',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -0,0 +1,25 @@
# Generated by Django 3.2.6 on 2021-09-03 00:54

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0026_auto_20210901_1247'),
    ]

    operations = [
        migrations.AddField(
            model_name='apikey',
            name='user',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_key', to='accounts.user'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='user',
            name='block_dashboard_login',
            field=models.BooleanField(default=False),
        ),
    ]
api/tacticalrmm/accounts/migrations/0028_auto_20211010_0249.py (new file, 150 lines)

@@ -0,0 +1,150 @@
# Generated by Django 3.2.6 on 2021-10-10 02:49

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('clients', '0018_auto_20211010_0249'),
        ('accounts', '0027_auto_20210903_0054'),
    ]

    operations = [
        migrations.AddField(
            model_name='role',
            name='can_list_accounts',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_agent_history',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_agents',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_alerts',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_api_keys',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_automation_policies',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_autotasks',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_checks',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_clients',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_deployments',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_notes',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_pendingactions',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_roles',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_scripts',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_sites',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_list_software',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_ping_agents',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_recover_agents',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='role',
            name='can_view_clients',
            field=models.ManyToManyField(blank=True, related_name='role_clients', to='clients.Client'),
        ),
        migrations.AddField(
            model_name='role',
            name='can_view_sites',
            field=models.ManyToManyField(blank=True, related_name='role_sites', to='clients.Site'),
        ),
        migrations.AlterField(
            model_name='apikey',
            name='created_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='apikey',
            name='modified_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='role',
            name='created_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='role',
            name='modified_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='created_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='modified_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='role',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='users', to='accounts.role'),
|
||||
),
|
||||
]
|
||||
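The migration above only adds the new can_list_* flags with a default of False, so existing roles stay locked down until an admin turns the flags on. A minimal sketch of enabling a few of the new permissions for an existing role from the Django shell, assuming a role named "Technician" already exists (the role name is illustrative only):

from accounts.models import Role

role = Role.objects.get(name="Technician")  # hypothetical role name
role.can_list_agents = True
role.can_list_checks = True
role.can_list_alerts = True
role.save(update_fields=["can_list_agents", "can_list_checks", "can_list_alerts"])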
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.6 on 2021-10-22 22:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0028_auto_20211010_0249'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_list_alerttemplates',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_manage_alerttemplates',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_run_urlactions',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.6 on 2021-11-04 02:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0029_auto_20211022_2245'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_manage_customfields',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='role',
|
||||
name='can_view_customfields',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,6 @@
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.db import models
|
||||
from django.db.models.fields import CharField, DateTimeField
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
@@ -7,6 +8,7 @@ AGENT_DBLCLICK_CHOICES = [
|
||||
("editagent", "Edit Agent"),
|
||||
("takecontrol", "Take Control"),
|
||||
("remotebg", "Remote Background"),
|
||||
("urlaction", "URL Action"),
|
||||
]
|
||||
|
||||
AGENT_TBL_TAB_CHOICES = [
|
||||
@@ -23,12 +25,20 @@ CLIENT_TREE_SORT_CHOICES = [
|
||||
|
||||
class User(AbstractUser, BaseAuditModel):
|
||||
is_active = models.BooleanField(default=True)
|
||||
block_dashboard_login = models.BooleanField(default=False)
|
||||
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
||||
dark_mode = models.BooleanField(default=True)
|
||||
show_community_scripts = models.BooleanField(default=True)
|
||||
agent_dblclick_action = models.CharField(
|
||||
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
|
||||
)
|
||||
url_action = models.ForeignKey(
|
||||
"core.URLAction",
|
||||
related_name="user",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
default_agent_tbl_tab = models.CharField(
|
||||
max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
|
||||
)
|
||||
@@ -38,6 +48,9 @@ class User(AbstractUser, BaseAuditModel):
|
||||
)
|
||||
client_tree_splitter = models.PositiveIntegerField(default=11)
|
||||
loading_bar_color = models.CharField(max_length=255, default="red")
|
||||
clear_search_when_switching = models.BooleanField(default=True)
|
||||
is_installer_user = models.BooleanField(default=False)
|
||||
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
|
||||
|
||||
agent = models.OneToOneField(
|
||||
"agents.Agent",
|
||||
@@ -47,9 +60,141 @@ class User(AbstractUser, BaseAuditModel):
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
role = models.ForeignKey(
|
||||
"accounts.Role",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="users",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def serialize(user):
|
||||
# serializes the user and returns json
|
||||
from .serializers import UserSerializer
|
||||
|
||||
return UserSerializer(user).data
|
||||
|
||||
|
||||
class Role(BaseAuditModel):
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
is_superuser = models.BooleanField(default=False)
|
||||
|
||||
# agents
|
||||
can_list_agents = models.BooleanField(default=False)
|
||||
can_ping_agents = models.BooleanField(default=False)
|
||||
can_use_mesh = models.BooleanField(default=False)
|
||||
can_uninstall_agents = models.BooleanField(default=False)
|
||||
can_update_agents = models.BooleanField(default=False)
|
||||
can_edit_agent = models.BooleanField(default=False)
|
||||
can_manage_procs = models.BooleanField(default=False)
|
||||
can_view_eventlogs = models.BooleanField(default=False)
|
||||
can_send_cmd = models.BooleanField(default=False)
|
||||
can_reboot_agents = models.BooleanField(default=False)
|
||||
can_install_agents = models.BooleanField(default=False)
|
||||
can_run_scripts = models.BooleanField(default=False)
|
||||
can_run_bulk = models.BooleanField(default=False)
|
||||
can_recover_agents = models.BooleanField(default=False)
|
||||
can_list_agent_history = models.BooleanField(default=False)
|
||||
|
||||
# core
|
||||
can_list_notes = models.BooleanField(default=False)
|
||||
can_manage_notes = models.BooleanField(default=False)
|
||||
can_view_core_settings = models.BooleanField(default=False)
|
||||
can_edit_core_settings = models.BooleanField(default=False)
|
||||
can_do_server_maint = models.BooleanField(default=False)
|
||||
can_code_sign = models.BooleanField(default=False)
|
||||
can_run_urlactions = models.BooleanField(default=False)
|
||||
can_view_customfields = models.BooleanField(default=False)
|
||||
can_manage_customfields = models.BooleanField(default=False)
|
||||
|
||||
# checks
|
||||
can_list_checks = models.BooleanField(default=False)
|
||||
can_manage_checks = models.BooleanField(default=False)
|
||||
can_run_checks = models.BooleanField(default=False)
|
||||
|
||||
# clients
|
||||
can_list_clients = models.BooleanField(default=False)
|
||||
can_manage_clients = models.BooleanField(default=False)
|
||||
can_list_sites = models.BooleanField(default=False)
|
||||
can_manage_sites = models.BooleanField(default=False)
|
||||
can_list_deployments = models.BooleanField(default=False)
|
||||
can_manage_deployments = models.BooleanField(default=False)
|
||||
can_view_clients = models.ManyToManyField(
|
||||
"clients.Client", related_name="role_clients", blank=True
|
||||
)
|
||||
can_view_sites = models.ManyToManyField(
|
||||
"clients.Site", related_name="role_sites", blank=True
|
||||
)
|
||||
|
||||
# automation
|
||||
can_list_automation_policies = models.BooleanField(default=False)
|
||||
can_manage_automation_policies = models.BooleanField(default=False)
|
||||
|
||||
# automated tasks
|
||||
can_list_autotasks = models.BooleanField(default=False)
|
||||
can_manage_autotasks = models.BooleanField(default=False)
|
||||
can_run_autotasks = models.BooleanField(default=False)
|
||||
|
||||
# logs
|
||||
can_view_auditlogs = models.BooleanField(default=False)
|
||||
can_list_pendingactions = models.BooleanField(default=False)
|
||||
can_manage_pendingactions = models.BooleanField(default=False)
|
||||
can_view_debuglogs = models.BooleanField(default=False)
|
||||
|
||||
# scripts
|
||||
can_list_scripts = models.BooleanField(default=False)
|
||||
can_manage_scripts = models.BooleanField(default=False)
|
||||
|
||||
# alerts
|
||||
can_list_alerts = models.BooleanField(default=False)
|
||||
can_manage_alerts = models.BooleanField(default=False)
|
||||
can_list_alerttemplates = models.BooleanField(default=False)
|
||||
can_manage_alerttemplates = models.BooleanField(default=False)
|
||||
|
||||
# win services
|
||||
can_manage_winsvcs = models.BooleanField(default=False)
|
||||
|
||||
# software
|
||||
can_list_software = models.BooleanField(default=False)
|
||||
can_manage_software = models.BooleanField(default=False)
|
||||
|
||||
# windows updates
|
||||
can_manage_winupdates = models.BooleanField(default=False)
|
||||
|
||||
# accounts
|
||||
can_list_accounts = models.BooleanField(default=False)
|
||||
can_manage_accounts = models.BooleanField(default=False)
|
||||
can_list_roles = models.BooleanField(default=False)
|
||||
can_manage_roles = models.BooleanField(default=False)
|
||||
|
||||
# authentication
|
||||
can_list_api_keys = models.BooleanField(default=False)
|
||||
can_manage_api_keys = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(role):
|
||||
# serializes the role and returns json
|
||||
from .serializers import RoleAuditSerializer
|
||||
|
||||
return RoleAuditSerializer(role).data
|
||||
|
||||
|
||||
class APIKey(BaseAuditModel):
|
||||
name = CharField(unique=True, max_length=25)
|
||||
key = CharField(unique=True, blank=True, max_length=48)
|
||||
expiration = DateTimeField(blank=True, null=True, default=None)
|
||||
user = models.ForeignKey(
|
||||
"accounts.User",
|
||||
related_name="api_key",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def serialize(apikey):
|
||||
from .serializers import APIKeyAuditSerializer
|
||||
|
||||
return APIKeyAuditSerializer(apikey).data
|
||||
|
||||
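For reference, a minimal sketch of creating one of the new APIKey records straight from the ORM, mirroring what the dashboard view does on POST (the username below is illustrative; in the real flow the key is generated server-side):

from django.utils.crypto import get_random_string

from accounts.models import APIKey, User

user = User.objects.get(username="tactical")  # hypothetical username
APIKey.objects.create(
    name="Monitoring integration",
    key=get_random_string(length=32).upper(),
    user=user,
)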
43
api/tacticalrmm/accounts/permissions.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from rest_framework import permissions
|
||||
|
||||
from tacticalrmm.permissions import _has_perm
|
||||
|
||||
|
||||
class AccountsPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view):
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_accounts")
|
||||
else:
|
||||
|
||||
# allow users to reset their own password/2fa, see issue #686
|
||||
base_path = "/accounts/users/"
|
||||
paths = ["reset/", "reset_totp/"]
|
||||
|
||||
if r.path in [base_path + i for i in paths]:
|
||||
from accounts.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(pk=r.data["id"])
|
||||
except User.DoesNotExist:
|
||||
pass
|
||||
else:
|
||||
if user == r.user:
|
||||
return True
|
||||
|
||||
return _has_perm(r, "can_manage_accounts")
|
||||
|
||||
|
||||
class RolesPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view):
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_roles")
|
||||
else:
|
||||
return _has_perm(r, "can_manage_roles")
|
||||
|
||||
|
||||
class APIKeyPerms(permissions.BasePermission):
|
||||
def has_permission(self, r, view):
|
||||
if r.method == "GET":
|
||||
return _has_perm(r, "can_list_api_keys")
|
||||
|
||||
return _has_perm(r, "can_manage_api_keys")
|
||||
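The permission classes above delegate to _has_perm from tacticalrmm.permissions, which is not part of this diff. A rough sketch of what such a helper could look like against these models, assuming superusers (and roles flagged is_superuser) bypass the individual flags; this is an illustration, not the project's actual implementation:

def _has_perm(request, perm: str) -> bool:
    # superusers always pass
    if request.user.is_superuser:
        return True

    # roles flagged as superuser also pass
    role = request.user.role
    if role and role.is_superuser:
        return True

    # otherwise look up the boolean flag on the user's role
    return bool(role and getattr(role, perm, False))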
@@ -1,7 +1,11 @@
|
||||
import pyotp
|
||||
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
SerializerMethodField,
|
||||
ReadOnlyField,
|
||||
)
|
||||
|
||||
from .models import User
|
||||
from .models import APIKey, User, Role
|
||||
|
||||
|
||||
class UserUISerializer(ModelSerializer):
|
||||
@@ -11,17 +15,20 @@ class UserUISerializer(ModelSerializer):
|
||||
"dark_mode",
|
||||
"show_community_scripts",
|
||||
"agent_dblclick_action",
|
||||
"url_action",
|
||||
"default_agent_tbl_tab",
|
||||
"client_tree_sort",
|
||||
"client_tree_splitter",
|
||||
"loading_bar_color",
|
||||
"clear_search_when_switching",
|
||||
"block_dashboard_login",
|
||||
]
|
||||
|
||||
|
||||
class UserSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = (
|
||||
fields = [
|
||||
"id",
|
||||
"username",
|
||||
"first_name",
|
||||
@@ -29,7 +36,10 @@ class UserSerializer(ModelSerializer):
|
||||
"email",
|
||||
"is_active",
|
||||
"last_login",
|
||||
)
|
||||
"last_login_ip",
|
||||
"role",
|
||||
"block_dashboard_login",
|
||||
]
|
||||
|
||||
|
||||
class TOTPSetupSerializer(ModelSerializer):
|
||||
@@ -48,3 +58,41 @@ class TOTPSetupSerializer(ModelSerializer):
|
||||
return pyotp.totp.TOTP(obj.totp_key).provisioning_uri(
|
||||
obj.username, issuer_name="Tactical RMM"
|
||||
)
|
||||
|
||||
|
||||
class RoleSerializer(ModelSerializer):
|
||||
user_count = SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = "__all__"
|
||||
|
||||
def get_user_count(self, obj):
|
||||
return obj.users.count()
|
||||
|
||||
|
||||
class RoleAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class APIKeySerializer(ModelSerializer):
|
||||
|
||||
username = ReadOnlyField(source="user.username")
|
||||
|
||||
class Meta:
|
||||
model = APIKey
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class APIKeyAuditSerializer(ModelSerializer):
|
||||
username = ReadOnlyField(source="user.username")
|
||||
|
||||
class Meta:
|
||||
model = APIKey
|
||||
fields = [
|
||||
"name",
|
||||
"username",
|
||||
"expiration",
|
||||
]
|
||||
|
||||
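The user_count field on RoleSerializer is a read-only SerializerMethodField computed from the reverse "users" relation, so it appears in GET responses but is ignored on writes. A small usage sketch (the role name is illustrative):

from accounts.models import Role
from accounts.serializers import RoleSerializer

role = Role.objects.create(name="Helpdesk")  # hypothetical role
print(RoleSerializer(role).data["user_count"])  # 0 until users are assigned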
@@ -1,10 +1,12 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import override_settings
|
||||
|
||||
from accounts.models import User
|
||||
from model_bakery import baker, seq
|
||||
from accounts.models import User, APIKey
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from accounts.serializers import APIKeySerializer
|
||||
|
||||
|
||||
class TestAccounts(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -25,12 +27,12 @@ class TestAccounts(TacticalTestCase):
|
||||
data = {"username": "bob", "password": "a3asdsa2314"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "bad credentials")
|
||||
self.assertEqual(r.data, "Bad credentials")
|
||||
|
||||
data = {"username": "billy", "password": "hunter2"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "bad credentials")
|
||||
self.assertEqual(r.data, "Bad credentials")
|
||||
|
||||
self.bob.totp_key = "AB5RI6YPFTZAS52G"
|
||||
self.bob.save()
|
||||
@@ -39,6 +41,12 @@ class TestAccounts(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "ok")
|
||||
|
||||
# test user set to block dashboard logins
|
||||
self.bob.block_dashboard_login = True
|
||||
self.bob.save()
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@patch("pyotp.TOTP.verify")
|
||||
def test_login_view(self, mock_verify):
|
||||
url = "/login/"
|
||||
@@ -53,7 +61,7 @@ class TestAccounts(TacticalTestCase):
|
||||
mock_verify.return_value = False
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "bad credentials")
|
||||
self.assertEqual(r.data, "Bad credentials")
|
||||
|
||||
mock_verify.return_value = True
|
||||
data = {"username": "bob", "password": "asd234234asd", "twofactor": "123456"}
|
||||
@@ -280,6 +288,7 @@ class TestUserAction(TacticalTestCase):
|
||||
"client_tree_sort": "alpha",
|
||||
"client_tree_splitter": 14,
|
||||
"loading_bar_color": "green",
|
||||
"clear_search_when_switching": False,
|
||||
}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
@@ -287,6 +296,68 @@ class TestUserAction(TacticalTestCase):
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
class TestAPIKeyViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
|
||||
def test_get_api_keys(self):
|
||||
url = "/accounts/apikeys/"
|
||||
apikeys = baker.make("accounts.APIKey", key=seq("APIKEY"), _quantity=3)
|
||||
|
||||
serializer = APIKeySerializer(apikeys, many=True)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_api_keys(self):
|
||||
url = "/accounts/apikeys/"
|
||||
|
||||
user = baker.make("accounts.User")
|
||||
data = {"name": "Name", "user": user.id, "expiration": None}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(APIKey.objects.filter(name="Name").exists())
|
||||
self.assertTrue(APIKey.objects.get(name="Name").key)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_modify_api_key(self):
|
||||
# test a call where api key doesn't exist
|
||||
resp = self.client.put("/accounts/apikeys/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
apikey = baker.make("accounts.APIKey", name="Test")
|
||||
url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
|
||||
|
||||
data = {"name": "New Name"} # type: ignore
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
apikey = APIKey.objects.get(pk=apikey.pk) # type: ignore
|
||||
self.assertEquals(apikey.name, "New Name")
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_api_key(self):
|
||||
# test a call where api key doesn't exist
|
||||
resp = self.client.delete("/accounts/apikeys/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete api key
|
||||
apikey = baker.make("accounts.APIKey")
|
||||
url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
|
||||
class TestTOTPSetup(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
@@ -312,3 +383,29 @@ class TestTOTPSetup(TacticalTestCase):
|
||||
r = self.client.post(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "totp token already set")
|
||||
|
||||
|
||||
class TestAPIAuthentication(TacticalTestCase):
|
||||
def setUp(self):
|
||||
# create a User and associate it with an API Key
|
||||
self.user = User.objects.create(username="api_user", is_superuser=True)
|
||||
self.api_key = APIKey.objects.create(
|
||||
name="Test Token", key="123456", user=self.user
|
||||
)
|
||||
|
||||
self.client_setup()
|
||||
|
||||
def test_api_auth(self):
|
||||
url = "/clients/"
|
||||
# auth should fail if no header set
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
# invalid api key in header should return code 401
|
||||
self.client.credentials(HTTP_X_API_KEY="000000")
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 401)
|
||||
|
||||
# valid api key in header should return code 200
|
||||
self.client.credentials(HTTP_X_API_KEY="123456")
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
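The test above exercises header-based authentication: a request carrying a valid key in the X-API-KEY header is authenticated without a session or knox token. A minimal sketch of how an external integration could call the API with one of these keys (the base URL and key value are placeholders):

import requests

API_BASE = "https://api.example.com"  # hypothetical RMM API host
HEADERS = {"X-API-KEY": "YOUR-GENERATED-KEY"}

r = requests.get(f"{API_BASE}/clients/", headers=HEADERS, timeout=30)
r.raise_for_status()
print(r.json())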
@@ -9,4 +9,8 @@ urlpatterns = [
|
||||
path("users/reset_totp/", views.UserActions.as_view()),
|
||||
path("users/setup_totp/", views.TOTPSetup.as_view()),
|
||||
path("users/ui/", views.UserUI.as_view()),
|
||||
path("roles/", views.GetAddRoles.as_view()),
|
||||
path("roles/<int:pk>/", views.GetUpdateDeleteRole.as_view()),
|
||||
path("apikeys/", views.GetAddAPIKeys.as_view()),
|
||||
path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
|
||||
]
|
||||
|
||||
@@ -3,26 +3,36 @@ from django.conf import settings
|
||||
from django.contrib.auth import login
|
||||
from django.db import IntegrityError
|
||||
from django.shortcuts import get_object_or_404
|
||||
from ipware import get_client_ip
|
||||
from knox.views import LoginView as KnoxLoginView
|
||||
from rest_framework import status
|
||||
from logs.models import AuditLog
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from logs.models import AuditLog
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import User
|
||||
from .serializers import TOTPSetupSerializer, UserSerializer, UserUISerializer
|
||||
from .models import APIKey, Role, User
|
||||
from .permissions import APIKeyPerms, AccountsPerms, RolesPerms
|
||||
from .serializers import (
|
||||
APIKeySerializer,
|
||||
RoleSerializer,
|
||||
TOTPSetupSerializer,
|
||||
UserSerializer,
|
||||
UserUISerializer,
|
||||
)
|
||||
|
||||
|
||||
def _is_root_user(request, user) -> bool:
|
||||
return (
|
||||
root = (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
)
|
||||
demo = (
|
||||
getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
|
||||
)
|
||||
return root or demo
|
||||
|
||||
|
||||
class CheckCreds(KnoxLoginView):
|
||||
@@ -34,11 +44,16 @@ class CheckCreds(KnoxLoginView):
|
||||
# check credentials
|
||||
serializer = AuthTokenSerializer(data=request.data)
|
||||
if not serializer.is_valid():
|
||||
AuditLog.audit_user_failed_login(request.data["username"])
|
||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
||||
AuditLog.audit_user_failed_login(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
user = serializer.validated_data["user"]
|
||||
|
||||
if user.block_dashboard_login:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
# if totp token not set modify response to notify frontend
|
||||
if not user.totp_key:
|
||||
login(request, user)
|
||||
@@ -60,26 +75,50 @@ class LoginView(KnoxLoginView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
user = serializer.validated_data["user"]
|
||||
|
||||
if user.block_dashboard_login:
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
token = request.data["twofactor"]
|
||||
totp = pyotp.TOTP(user.totp_key)
|
||||
|
||||
if settings.DEBUG and token == "sekret":
|
||||
valid = True
|
||||
elif getattr(settings, "DEMO", False):
|
||||
valid = True
|
||||
elif totp.verify(token, valid_window=10):
|
||||
valid = True
|
||||
|
||||
if valid:
|
||||
login(request, user)
|
||||
AuditLog.audit_user_login_successful(request.data["username"])
|
||||
|
||||
# save ip information
|
||||
client_ip, is_routable = get_client_ip(request)
|
||||
user.last_login_ip = client_ip
|
||||
user.save()
|
||||
|
||||
AuditLog.audit_user_login_successful(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return super(LoginView, self).post(request, format=None)
|
||||
else:
|
||||
AuditLog.audit_user_failed_twofactor(request.data["username"])
|
||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
||||
AuditLog.audit_user_failed_twofactor(
|
||||
request.data["username"], debug_info={"ip": request._client_ip}
|
||||
)
|
||||
return notify_error("Bad credentials")
|
||||
|
||||
|
||||
class GetAddUsers(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
def get(self, request):
|
||||
users = User.objects.filter(agent=None)
|
||||
search = request.GET.get("search", None)
|
||||
|
||||
if search:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False).filter(
|
||||
username__icontains=search
|
||||
)
|
||||
else:
|
||||
users = User.objects.filter(agent=None, is_installer_user=False)
|
||||
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
|
||||
@@ -96,15 +135,21 @@ class GetAddUsers(APIView):
|
||||
f"ERROR: User {request.data['username']} already exists!"
|
||||
)
|
||||
|
||||
user.first_name = request.data["first_name"]
|
||||
user.last_name = request.data["last_name"]
|
||||
# Can be changed once permissions and groups are introduced
|
||||
user.is_superuser = True
|
||||
if "first_name" in request.data.keys():
|
||||
user.first_name = request.data["first_name"]
|
||||
if "last_name" in request.data.keys():
|
||||
user.last_name = request.data["last_name"]
|
||||
if "role" in request.data.keys() and isinstance(request.data["role"], int):
|
||||
role = get_object_or_404(Role, pk=request.data["role"])
|
||||
user.role = role
|
||||
|
||||
user.save()
|
||||
return Response(user.username)
|
||||
|
||||
|
||||
class GetUpdateDeleteUser(APIView):
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
|
||||
@@ -133,7 +178,7 @@ class GetUpdateDeleteUser(APIView):
|
||||
|
||||
|
||||
class UserActions(APIView):
|
||||
|
||||
permission_classes = [IsAuthenticated, AccountsPerms]
|
||||
# reset password
|
||||
def post(self, request):
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
@@ -182,3 +227,76 @@ class UserUI(APIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetAddRoles(APIView):
|
||||
permission_classes = [IsAuthenticated, RolesPerms]
|
||||
|
||||
def get(self, request):
|
||||
roles = Role.objects.all()
|
||||
return Response(RoleSerializer(roles, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
serializer = RoleSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("Role was added")
|
||||
|
||||
|
||||
class GetUpdateDeleteRole(APIView):
|
||||
permission_classes = [IsAuthenticated, RolesPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
return Response(RoleSerializer(role).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
serializer = RoleSerializer(instance=role, data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("Role was edited")
|
||||
|
||||
def delete(self, request, pk):
|
||||
role = get_object_or_404(Role, pk=pk)
|
||||
role.delete()
|
||||
return Response("Role was removed")
|
||||
|
||||
|
||||
class GetAddAPIKeys(APIView):
|
||||
permission_classes = [IsAuthenticated, APIKeyPerms]
|
||||
|
||||
def get(self, request):
|
||||
apikeys = APIKey.objects.all()
|
||||
return Response(APIKeySerializer(apikeys, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
# generate a random API Key
|
||||
from django.utils.crypto import get_random_string
|
||||
|
||||
request.data["key"] = get_random_string(length=32).upper()
|
||||
serializer = APIKeySerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
return Response("The API Key was added")
|
||||
|
||||
|
||||
class GetUpdateDeleteAPIKey(APIView):
|
||||
permission_classes = [IsAuthenticated, APIKeyPerms]
|
||||
|
||||
def put(self, request, pk):
|
||||
apikey = get_object_or_404(APIKey, pk=pk)
|
||||
|
||||
# remove the API key if present in request data so it cannot be changed
|
||||
if "key" in request.data.keys():
|
||||
request.data.pop("key")
|
||||
|
||||
serializer = APIKeySerializer(instance=apikey, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("The API Key was edited")
|
||||
|
||||
def delete(self, request, pk):
|
||||
apikey = get_object_or_404(APIKey, pk=pk)
|
||||
apikey.delete()
|
||||
return Response("The API Key was deleted")
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction
|
||||
from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
|
||||
|
||||
admin.site.register(Agent)
|
||||
admin.site.register(RecoveryAction)
|
||||
admin.site.register(Note)
|
||||
admin.site.register(AgentCustomField)
|
||||
admin.site.register(AgentHistory)
|
||||
|
||||
@@ -30,7 +30,8 @@ agent = Recipe(
|
||||
hostname="DESKTOP-TEST123",
|
||||
version="1.3.0",
|
||||
monitoring_type=cycle(["workstation", "server"]),
|
||||
agent_id=seq("asdkj3h4234-1234hg3h4g34-234jjh34|DESKTOP-TEST123"),
|
||||
agent_id=seq(generate_agent_id("DESKTOP-TEST123")),
|
||||
last_seen=djangotime.now() - djangotime.timedelta(days=5),
|
||||
)
|
||||
|
||||
server_agent = agent.extend(
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
import asyncio
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.utils import AGENT_DEFER, reload_nats
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Delete old agents"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--days",
|
||||
type=int,
|
||||
help="Delete agents that have not checked in for this many days",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--agentver",
|
||||
type=str,
|
||||
help="Delete agents that equal to or less than this version",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--delete",
|
||||
action="store_true",
|
||||
help="This will delete agents",
|
||||
)
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
days = kwargs["days"]
|
||||
agentver = kwargs["agentver"]
|
||||
delete = kwargs["delete"]
|
||||
|
||||
if not days and not agentver:
|
||||
self.stdout.write(
|
||||
self.style.ERROR("Must have at least one parameter: days or agentver")
|
||||
)
|
||||
return
|
||||
|
||||
q = Agent.objects.defer(*AGENT_DEFER)
|
||||
|
||||
agents = []
|
||||
if days:
|
||||
overdue = djangotime.now() - djangotime.timedelta(days=days)
|
||||
agents = [i for i in q if i.last_seen < overdue]
|
||||
|
||||
if agentver:
|
||||
agents = [i for i in q if pyver.parse(i.version) <= pyver.parse(agentver)]
|
||||
|
||||
if not agents:
|
||||
self.stdout.write(self.style.ERROR("No agents matched"))
|
||||
return
|
||||
|
||||
deleted_count = 0
|
||||
for agent in agents:
|
||||
s = f"{agent.hostname} | Version {agent.version} | Last Seen {agent.last_seen} | {agent.client} > {agent.site}"
|
||||
if delete:
|
||||
s = "Deleting " + s
|
||||
self.stdout.write(self.style.SUCCESS(s))
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
try:
|
||||
agent.delete()
|
||||
except Exception as e:
|
||||
err = f"Failed to delete agent {agent.hostname}: {str(e)}"
|
||||
self.stdout.write(self.style.ERROR(err))
|
||||
else:
|
||||
deleted_count += 1
|
||||
else:
|
||||
self.stdout.write(self.style.WARNING(s))
|
||||
|
||||
if delete:
|
||||
reload_nats()
|
||||
self.stdout.write(self.style.SUCCESS(f"Deleted {deleted_count} agents"))
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"The above agents would be deleted. Run again with --delete to actually delete them."
|
||||
)
|
||||
)
|
||||
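A quick sketch of invoking this command programmatically; the module name delete_old_agents is assumed here since the file path is not visible in this hunk, so adjust it to the actual filename:

from django.core.management import call_command

# dry run: only prints agents not seen in the last 60 days
call_command("delete_old_agents", days=60)

# actually uninstall and delete them, then reload nats
call_command("delete_old_agents", days=60, delete=True)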
36
api/tacticalrmm/agents/management/commands/demo_cron.py
Normal file
@@ -0,0 +1,36 @@
|
||||
# import datetime as dt
|
||||
import random
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
from agents.models import Agent
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "stuff for demo site in cron"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
|
||||
random_dates = []
|
||||
now = djangotime.now()
|
||||
|
||||
for _ in range(20):
|
||||
rand = now - djangotime.timedelta(minutes=random.randint(1, 2))
|
||||
random_dates.append(rand)
|
||||
|
||||
for _ in range(5):
|
||||
rand = now - djangotime.timedelta(minutes=random.randint(10, 20))
|
||||
random_dates.append(rand)
|
||||
|
||||
""" for _ in range(5):
|
||||
rand = djangotime.now() - djangotime.timedelta(hours=random.randint(1, 10))
|
||||
random_dates.append(rand)
|
||||
|
||||
for _ in range(5):
|
||||
rand = djangotime.now() - djangotime.timedelta(days=random.randint(40, 90))
|
||||
random_dates.append(rand) """
|
||||
|
||||
agents = Agent.objects.only("last_seen")
|
||||
for agent in agents:
|
||||
agent.last_seen = random.choice(random_dates)
|
||||
agent.save(update_fields=["last_seen"])
|
||||
668
api/tacticalrmm/agents/management/commands/fake_agents.py
Normal file
@@ -0,0 +1,668 @@
|
||||
import json
|
||||
import random
|
||||
import string
|
||||
import datetime as dt
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
from django.conf import settings
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentHistory
|
||||
from clients.models import Client, Site
|
||||
from software.models import InstalledSoftware
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
from checks.models import Check, CheckHistory
|
||||
from scripts.models import Script
|
||||
from autotasks.models import AutomatedTask
|
||||
from automation.models import Policy
|
||||
from logs.models import PendingAction, AuditLog
|
||||
|
||||
from tacticalrmm.demo_data import (
|
||||
disks,
|
||||
temp_dir_stdout,
|
||||
spooler_stdout,
|
||||
ping_fail_output,
|
||||
ping_success_output,
|
||||
)
|
||||
|
||||
AGENTS_TO_GENERATE = 250
|
||||
|
||||
SVCS = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
|
||||
WMI_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi1.json")
|
||||
WMI_2 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi2.json")
|
||||
WMI_3 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi3.json")
|
||||
SW_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/software1.json")
|
||||
SW_2 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/software2.json")
|
||||
WIN_UPDATES = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winupdates.json")
|
||||
EVT_LOG_FAIL = settings.BASE_DIR.joinpath(
|
||||
"tacticalrmm/test_data/eventlog_check_fail.json"
|
||||
)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "populate database with fake agents"
|
||||
|
||||
def rand_string(self, length):
|
||||
chars = string.ascii_letters
|
||||
return "".join(random.choice(chars) for _ in range(length))
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
|
||||
user = User.objects.first()
|
||||
user.totp_key = "ABSA234234"
|
||||
user.save(update_fields=["totp_key"])
|
||||
|
||||
Client.objects.all().delete()
|
||||
Agent.objects.all().delete()
|
||||
Check.objects.all().delete()
|
||||
Script.objects.all().delete()
|
||||
AutomatedTask.objects.all().delete()
|
||||
CheckHistory.objects.all().delete()
|
||||
Policy.objects.all().delete()
|
||||
AuditLog.objects.all().delete()
|
||||
PendingAction.objects.all().delete()
|
||||
|
||||
Script.load_community_scripts()
|
||||
|
||||
# policies
|
||||
check_policy = Policy()
|
||||
check_policy.name = "Demo Checks Policy"
|
||||
check_policy.desc = "Demo Checks Policy"
|
||||
check_policy.active = True
|
||||
check_policy.enforced = True
|
||||
check_policy.save()
|
||||
|
||||
patch_policy = Policy()
|
||||
patch_policy.name = "Demo Patch Policy"
|
||||
patch_policy.desc = "Demo Patch Policy"
|
||||
patch_policy.active = True
|
||||
patch_policy.enforced = True
|
||||
patch_policy.save()
|
||||
|
||||
update_policy = WinUpdatePolicy()
|
||||
update_policy.policy = patch_policy
|
||||
update_policy.critical = "approve"
|
||||
update_policy.important = "approve"
|
||||
update_policy.moderate = "approve"
|
||||
update_policy.low = "ignore"
|
||||
update_policy.other = "ignore"
|
||||
update_policy.run_time_days = [6, 0, 2]
|
||||
update_policy.run_time_day = 1
|
||||
update_policy.reboot_after_install = "required"
|
||||
update_policy.reprocess_failed = True
|
||||
update_policy.email_if_fail = True
|
||||
update_policy.save()
|
||||
|
||||
clients = [
|
||||
"Company 2",
|
||||
"Company 3",
|
||||
"Company 1",
|
||||
"Company 4",
|
||||
"Company 5",
|
||||
"Company 6",
|
||||
]
|
||||
sites1 = ["HQ1", "LA Office 1", "NY Office 1"]
|
||||
sites2 = ["HQ2", "LA Office 2", "NY Office 2"]
|
||||
sites3 = ["HQ3", "LA Office 3", "NY Office 3"]
|
||||
sites4 = ["HQ4", "LA Office 4", "NY Office 4"]
|
||||
sites5 = ["HQ5", "LA Office 5", "NY Office 5"]
|
||||
sites6 = ["HQ6", "LA Office 6", "NY Office 6"]
|
||||
|
||||
client1 = Client(name="Company 1")
|
||||
client2 = Client(name="Company 2")
|
||||
client3 = Client(name="Company 3")
|
||||
client4 = Client(name="Company 4")
|
||||
client5 = Client(name="Company 5")
|
||||
client6 = Client(name="Company 6")
|
||||
|
||||
client1.save()
|
||||
client2.save()
|
||||
client3.save()
|
||||
client4.save()
|
||||
client5.save()
|
||||
client6.save()
|
||||
|
||||
for site in sites1:
|
||||
Site(client=client1, name=site).save()
|
||||
|
||||
for site in sites2:
|
||||
Site(client=client2, name=site).save()
|
||||
|
||||
for site in sites3:
|
||||
Site(client=client3, name=site).save()
|
||||
|
||||
for site in sites4:
|
||||
Site(client=client4, name=site).save()
|
||||
|
||||
for site in sites5:
|
||||
Site(client=client5, name=site).save()
|
||||
|
||||
for site in sites6:
|
||||
Site(client=client6, name=site).save()
|
||||
|
||||
hostnames = [
|
||||
"DC-1",
|
||||
"DC-2",
|
||||
"FSV-1",
|
||||
"FSV-2",
|
||||
"WSUS",
|
||||
"DESKTOP-12345",
|
||||
"LAPTOP-55443",
|
||||
]
|
||||
descriptions = ["Bob's computer", "Primary DC", "File Server", "Karen's Laptop"]
|
||||
modes = ["server", "workstation"]
|
||||
op_systems_servers = [
|
||||
"Microsoft Windows Server 2016 Standard, 64bit (build 14393)",
|
||||
"Microsoft Windows Server 2012 R2 Standard, 64bit (build 9600)",
|
||||
"Microsoft Windows Server 2019 Standard, 64bit (build 17763)",
|
||||
]
|
||||
|
||||
op_systems_workstations = [
|
||||
"Microsoft Windows 8.1 Pro, 64bit (build 9600)",
|
||||
"Microsoft Windows 10 Pro for Workstations, 64bit (build 18363)",
|
||||
"Microsoft Windows 10 Pro, 64bit (build 18363)",
|
||||
]
|
||||
|
||||
public_ips = ["65.234.22.4", "74.123.43.5", "44.21.134.45"]
|
||||
|
||||
total_rams = [4, 8, 16, 32, 64, 128]
|
||||
used_rams = [10, 13, 60, 25, 76, 34, 56, 34, 39]
|
||||
|
||||
now = dt.datetime.now()
|
||||
|
||||
boot_times = []
|
||||
|
||||
for _ in range(15):
|
||||
rand_hour = now - dt.timedelta(hours=random.randint(1, 22))
|
||||
boot_times.append(str(rand_hour.timestamp()))
|
||||
|
||||
for _ in range(5):
|
||||
rand_days = now - dt.timedelta(days=random.randint(2, 50))
|
||||
boot_times.append(str(rand_days.timestamp()))
|
||||
|
||||
user_names = ["None", "Karen", "Steve", "jsmith", "jdoe"]
|
||||
|
||||
with open(SVCS) as f:
|
||||
services = json.load(f)
|
||||
|
||||
# WMI
|
||||
with open(WMI_1) as f:
|
||||
wmi1 = json.load(f)
|
||||
|
||||
with open(WMI_2) as f:
|
||||
wmi2 = json.load(f)
|
||||
|
||||
with open(WMI_3) as f:
|
||||
wmi3 = json.load(f)
|
||||
|
||||
wmi_details = []
|
||||
wmi_details.append(wmi1)
|
||||
wmi_details.append(wmi2)
|
||||
wmi_details.append(wmi3)
|
||||
|
||||
# software
|
||||
with open(SW_1) as f:
|
||||
software1 = json.load(f)
|
||||
|
||||
with open(SW_2) as f:
|
||||
software2 = json.load(f)
|
||||
|
||||
softwares = []
|
||||
softwares.append(software1)
|
||||
softwares.append(software2)
|
||||
|
||||
# windows updates
|
||||
with open(WIN_UPDATES) as f:
|
||||
windows_updates = json.load(f)["samplecomputer"]
|
||||
|
||||
# event log check fail data
|
||||
with open(EVT_LOG_FAIL) as f:
|
||||
eventlog_check_fail_data = json.load(f)
|
||||
|
||||
# create scripts
|
||||
|
||||
clear_spool = Script()
|
||||
clear_spool.name = "Clear Print Spooler"
|
||||
clear_spool.description = "clears the print spooler. Fuck printers"
|
||||
clear_spool.filename = "clear_print_spool.bat"
|
||||
clear_spool.shell = "cmd"
|
||||
clear_spool.save()
|
||||
|
||||
check_net_aware = Script()
|
||||
check_net_aware.name = "Check Network Location Awareness"
|
||||
check_net_aware.description = "Checks network location awareness on domain computers, which should always be the domain profile and not public or private. This sometimes happens when the computer restarts before the domain is available. This script will return 0 if the check passes or 1 if it fails."
|
||||
check_net_aware.filename = "check_network_loc_aware.ps1"
|
||||
check_net_aware.shell = "powershell"
|
||||
check_net_aware.save()
|
||||
|
||||
check_pool_health = Script()
|
||||
check_pool_health.name = "Check storage pool health"
|
||||
check_pool_health.description = "loops through all storage pools and will fail if any of them are not healthy"
|
||||
check_pool_health.filename = "check_storage_pool_health.ps1"
|
||||
check_pool_health.shell = "powershell"
|
||||
check_pool_health.save()
|
||||
|
||||
restart_nla = Script()
|
||||
restart_nla.name = "Restart NLA Service"
|
||||
restart_nla.description = "restarts the Network Location Awareness windows service to fix the nic profile. Run this after the check network service fails"
|
||||
restart_nla.filename = "restart_nla.ps1"
|
||||
restart_nla.shell = "powershell"
|
||||
restart_nla.save()
|
||||
|
||||
show_tmp_dir_script = Script()
|
||||
show_tmp_dir_script.name = "Check temp dir"
|
||||
show_tmp_dir_script.description = "shows files in temp dir using python"
|
||||
show_tmp_dir_script.filename = "show_temp_dir.py"
|
||||
show_tmp_dir_script.shell = "python"
|
||||
show_tmp_dir_script.save()
|
||||
|
||||
for count_agents in range(AGENTS_TO_GENERATE):
|
||||
|
||||
client = random.choice(clients)
|
||||
|
||||
if client == "Company 1":
|
||||
site = random.choice(sites1)
|
||||
elif client == "Company 2":
|
||||
site = random.choice(sites2)
|
||||
elif client == "Company 3":
|
||||
site = random.choice(sites3)
|
||||
elif client == "Company 4":
|
||||
site = random.choice(sites4)
|
||||
elif client == "Company 5":
|
||||
site = random.choice(sites5)
|
||||
elif client == "Company 6":
|
||||
site = random.choice(sites6)
|
||||
|
||||
agent = Agent()
|
||||
|
||||
mode = random.choice(modes)
|
||||
if mode == "server":
|
||||
agent.operating_system = random.choice(op_systems_servers)
|
||||
else:
|
||||
agent.operating_system = random.choice(op_systems_workstations)
|
||||
|
||||
agent.hostname = random.choice(hostnames)
|
||||
agent.version = settings.LATEST_AGENT_VER
|
||||
agent.salt_ver = "1.1.0"
|
||||
agent.site = Site.objects.get(name=site)
|
||||
agent.agent_id = self.rand_string(25)
|
||||
agent.description = random.choice(descriptions)
|
||||
agent.monitoring_type = mode
|
||||
agent.public_ip = random.choice(public_ips)
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.plat = "windows"
|
||||
agent.plat_release = "windows-2019Server"
|
||||
agent.total_ram = random.choice(total_rams)
|
||||
agent.used_ram = random.choice(used_rams)
|
||||
agent.boot_time = random.choice(boot_times)
|
||||
agent.logged_in_username = random.choice(user_names)
|
||||
agent.antivirus = "windowsdefender"
|
||||
agent.mesh_node_id = (
|
||||
"3UiLhe420@kaVQ0rswzBeonW$WY0xrFFUDBQlcYdXoriLXzvPmBpMrV99vRHXFlb"
|
||||
)
|
||||
agent.overdue_email_alert = random.choice([True, False])
|
||||
agent.overdue_text_alert = random.choice([True, False])
|
||||
agent.needs_reboot = random.choice([True, False])
|
||||
agent.wmi_detail = random.choice(wmi_details)
|
||||
agent.services = services
|
||||
agent.disks = random.choice(disks)
|
||||
agent.salt_id = "not-used"
|
||||
|
||||
agent.save()
|
||||
|
||||
InstalledSoftware(agent=agent, software=random.choice(softwares)).save()
|
||||
|
||||
if mode == "workstation":
|
||||
WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
|
||||
else:
|
||||
WinUpdatePolicy(agent=agent).save()
|
||||
|
||||
# windows updates load
|
||||
guids = []
|
||||
for k in windows_updates.keys():
|
||||
guids.append(k)
|
||||
|
||||
for i in guids:
|
||||
WinUpdate(
|
||||
agent=agent,
|
||||
guid=i,
|
||||
kb=windows_updates[i]["KBs"][0],
|
||||
mandatory=windows_updates[i]["Mandatory"],
|
||||
title=windows_updates[i]["Title"],
|
||||
needs_reboot=windows_updates[i]["NeedsReboot"],
|
||||
installed=windows_updates[i]["Installed"],
|
||||
downloaded=windows_updates[i]["Downloaded"],
|
||||
description=windows_updates[i]["Description"],
|
||||
severity=windows_updates[i]["Severity"],
|
||||
).save()
|
||||
|
||||
# agent histories
|
||||
hist = AgentHistory()
|
||||
hist.agent = agent
|
||||
hist.type = "cmd_run"
|
||||
hist.command = "ping google.com"
|
||||
hist.username = "demo"
|
||||
hist.results = ping_success_output
|
||||
hist.save()
|
||||
|
||||
hist1 = AgentHistory()
|
||||
hist1.agent = agent
|
||||
hist1.type = "script_run"
|
||||
hist1.script = clear_spool
|
||||
hist1.script_results = {
|
||||
"id": 1,
|
||||
"stderr": "",
|
||||
"stdout": spooler_stdout,
|
||||
"execution_time": 3.5554593,
|
||||
"retcode": 0,
|
||||
}
|
||||
hist1.save()
|
||||
|
||||
# disk space check
|
||||
check1 = Check()
|
||||
check1.agent = agent
|
||||
check1.check_type = "diskspace"
|
||||
check1.status = "passing"
|
||||
check1.last_run = djangotime.now()
|
||||
check1.more_info = "Total: 498.7GB, Free: 287.4GB"
|
||||
check1.warning_threshold = 25
|
||||
check1.error_threshold = 10
|
||||
check1.disk = "C:"
|
||||
check1.email_alert = random.choice([True, False])
|
||||
check1.text_alert = random.choice([True, False])
|
||||
check1.save()
|
||||
|
||||
for i in range(30):
|
||||
check1_history = CheckHistory()
|
||||
check1_history.check_id = check1.id
|
||||
check1_history.x = djangotime.now() - djangotime.timedelta(
|
||||
minutes=i * 2
|
||||
)
|
||||
check1_history.y = random.randint(13, 40)
|
||||
check1_history.save()
|
||||
|
||||
# ping check
|
||||
check2 = Check()
|
||||
check2.agent = agent
|
||||
check2.check_type = "ping"
|
||||
check2.last_run = djangotime.now()
|
||||
check2.email_alert = random.choice([True, False])
|
||||
check2.text_alert = random.choice([True, False])
|
||||
|
||||
if site in sites5:
|
||||
check2.name = "Synology NAS"
|
||||
check2.status = "failing"
|
||||
check2.ip = "172.17.14.26"
|
||||
check2.more_info = ping_fail_output
|
||||
else:
|
||||
check2.name = "Google"
|
||||
check2.status = "passing"
|
||||
check2.ip = "8.8.8.8"
|
||||
check2.more_info = ping_success_output
|
||||
|
||||
check2.save()
|
||||
|
||||
for i in range(30):
|
||||
check2_history = CheckHistory()
|
||||
check2_history.check_id = check2.id
|
||||
check2_history.x = djangotime.now() - djangotime.timedelta(
|
||||
minutes=i * 2
|
||||
)
|
||||
if site in sites5:
|
||||
check2_history.y = 1
|
||||
check2_history.results = ping_fail_output
|
||||
else:
|
||||
check2_history.y = 0
|
||||
check2_history.results = ping_success_output
|
||||
check2_history.save()
|
||||
|
||||
# cpu load check
|
||||
check3 = Check()
|
||||
check3.agent = agent
|
||||
check3.check_type = "cpuload"
|
||||
check3.status = "passing"
|
||||
check3.last_run = djangotime.now()
|
||||
check3.warning_threshold = 70
|
||||
check3.error_threshold = 90
|
||||
check3.history = [15, 23, 16, 22, 22, 27, 15, 23, 23, 20, 10, 10, 13, 34]
|
||||
check3.email_alert = random.choice([True, False])
|
||||
check3.text_alert = random.choice([True, False])
|
||||
check3.save()
|
||||
|
||||
for i in range(30):
|
||||
check3_history = CheckHistory()
|
||||
check3_history.check_id = check3.id
|
||||
check3_history.x = djangotime.now() - djangotime.timedelta(
|
||||
minutes=i * 2
|
||||
)
|
||||
check3_history.y = random.randint(2, 79)
|
||||
check3_history.save()
|
||||
|
||||
# memory check
|
||||
check4 = Check()
|
||||
check4.agent = agent
|
||||
check4.check_type = "memory"
|
||||
check4.status = "passing"
|
||||
check4.warning_threshold = 70
|
||||
check4.error_threshold = 85
|
||||
check4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34]
|
||||
check4.email_alert = random.choice([True, False])
|
||||
check4.text_alert = random.choice([True, False])
|
||||
check4.save()
|
||||
|
||||
for i in range(30):
|
||||
check4_history = CheckHistory()
|
||||
check4_history.check_id = check4.id
|
||||
check4_history.x = djangotime.now() - djangotime.timedelta(
|
||||
minutes=i * 2
|
||||
)
|
||||
check4_history.y = random.randint(2, 79)
|
||||
check4_history.save()
|
||||
|
||||
# script check storage pool
|
||||
check5 = Check()
|
||||
check5.agent = agent
|
||||
check5.check_type = "script"
|
||||
check5.status = "passing"
|
||||
check5.last_run = djangotime.now()
|
||||
check5.email_alert = random.choice([True, False])
|
||||
check5.text_alert = random.choice([True, False])
|
||||
check5.timeout = 120
|
||||
check5.retcode = 0
|
||||
check5.execution_time = "4.0000"
|
||||
check5.script = check_pool_health
|
||||
check5.save()
|
||||
|
||||
for i in range(30):
|
||||
check5_history = CheckHistory()
|
||||
check5_history.check_id = check5.id
|
||||
check5_history.x = djangotime.now() - djangotime.timedelta(
|
||||
minutes=i * 2
|
||||
)
|
||||
if i == 10 or i == 18:
|
||||
check5_history.y = 1
|
||||
else:
|
||||
check5_history.y = 0
|
||||
check5_history.save()
|
||||
|
||||
check6 = Check()
|
||||
check6.agent = agent
|
||||
check6.check_type = "script"
|
||||
check6.status = "passing"
|
||||
check6.last_run = djangotime.now()
|
||||
check6.email_alert = random.choice([True, False])
|
||||
check6.text_alert = random.choice([True, False])
|
||||
check6.timeout = 120
|
||||
check6.retcode = 0
|
||||
check6.execution_time = "4.0000"
|
||||
check6.script = check_net_aware
|
||||
check6.save()
|
||||
|
||||
for i in range(30):
|
||||
check6_history = CheckHistory()
|
||||
check6_history.check_id = check6.id
|
||||
check6_history.x = djangotime.now() - djangotime.timedelta(
|
||||
minutes=i * 2
|
||||
)
|
||||
check6_history.y = 0
|
||||
check6_history.save()
|
||||
|
||||
nla_task = AutomatedTask()
nla_task.agent = agent
nla_task.script = restart_nla
nla_task.assigned_check = check6
nla_task.name = "Restart NLA"
nla_task.task_type = "checkfailure"
nla_task.win_task_name = "demotask123"
nla_task.execution_time = "1.8443"
nla_task.last_run = djangotime.now()
nla_task.stdout = "no stdout"
nla_task.retcode = 0
nla_task.sync_status = "synced"
nla_task.save()

spool_task = AutomatedTask()
spool_task.agent = agent
spool_task.script = clear_spool
spool_task.name = "Clear the print spooler"
spool_task.task_type = "scheduled"
spool_task.run_time_bit_weekdays = 127
spool_task.run_time_minute = "04:45"
spool_task.win_task_name = "demospool123"
spool_task.last_run = djangotime.now()
spool_task.retcode = 0
spool_task.stdout = spooler_stdout
spool_task.sync_status = "synced"
spool_task.save()

tmp_dir_task = AutomatedTask()
tmp_dir_task.agent = agent
tmp_dir_task.name = "show temp dir files"
tmp_dir_task.script = show_tmp_dir_script
tmp_dir_task.task_type = "manual"
tmp_dir_task.win_task_name = "demotemp"
tmp_dir_task.last_run = djangotime.now()
tmp_dir_task.stdout = temp_dir_stdout
tmp_dir_task.retcode = 0
tmp_dir_task.sync_status = "synced"
tmp_dir_task.save()

check7 = Check()
check7.agent = agent
check7.check_type = "script"
check7.status = "passing"
check7.last_run = djangotime.now()
check7.email_alert = random.choice([True, False])
check7.text_alert = random.choice([True, False])
check7.timeout = 120
check7.retcode = 0
check7.execution_time = "3.1337"
check7.script = clear_spool
check7.stdout = spooler_stdout
check7.save()

for i in range(30):
    check7_history = CheckHistory()
    check7_history.check_id = check7.id
    check7_history.x = djangotime.now() - djangotime.timedelta(
        minutes=i * 2
    )
    check7_history.y = 0
    check7_history.save()

check8 = Check()
check8.agent = agent
check8.check_type = "winsvc"
check8.status = "passing"
check8.last_run = djangotime.now()
check8.email_alert = random.choice([True, False])
check8.text_alert = random.choice([True, False])
check8.more_info = "Status RUNNING"
check8.fails_b4_alert = 4
check8.svc_name = "Spooler"
check8.svc_display_name = "Print Spooler"
check8.pass_if_start_pending = False
check8.restart_if_stopped = True
check8.save()

for i in range(30):
    check8_history = CheckHistory()
    check8_history.check_id = check8.id
    check8_history.x = djangotime.now() - djangotime.timedelta(
        minutes=i * 2
    )
    if i == 10 or i == 18:
        check8_history.y = 1
        check8_history.results = "Status STOPPED"
    else:
        check8_history.y = 0
        check8_history.results = "Status RUNNING"
    check8_history.save()

check9 = Check()
check9.agent = agent
check9.check_type = "eventlog"
check9.name = "unexpected shutdown"

check9.last_run = djangotime.now()
check9.email_alert = random.choice([True, False])
check9.text_alert = random.choice([True, False])
check9.fails_b4_alert = 2

if site in sites5:
    check9.extra_details = eventlog_check_fail_data
    check9.status = "failing"
else:
    check9.extra_details = {"log": []}
    check9.status = "passing"

check9.log_name = "Application"
check9.event_id = 1001
check9.event_type = "INFO"
check9.fail_when = "contains"
check9.search_last_days = 30

check9.save()

for i in range(30):
    check9_history = CheckHistory()
    check9_history.check_id = check9.id
    check9_history.x = djangotime.now() - djangotime.timedelta(
        minutes=i * 2
    )
    if i == 10 or i == 18:
        check9_history.y = 1
        check9_history.results = "Events Found: 16"
    else:
        check9_history.y = 0
        check9_history.results = "Events Found: 0"
    check9_history.save()

pick = random.randint(1, 10)

if pick == 5 or pick == 3:

    reboot_time = djangotime.now() + djangotime.timedelta(
        minutes=random.randint(1000, 500000)
    )
    date_obj = dt.datetime.strftime(reboot_time, "%Y-%m-%d %H:%M")

    obj = dt.datetime.strptime(date_obj, "%Y-%m-%d %H:%M")

    task_name = "TacticalRMM_SchedReboot_" + "".join(
        random.choice(string.ascii_letters) for _ in range(10)
    )

    sched_reboot = PendingAction()
    sched_reboot.agent = agent
    sched_reboot.action_type = "schedreboot"
    sched_reboot.details = {
        "time": str(obj),
        "taskname": task_name,
    }
    sched_reboot.save()

self.stdout.write(self.style.SUCCESS(f"Added agent # {count_agents + 1}"))

self.stdout.write("done")
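For local testing, a seeding command like the one above would normally be run through Django's management entry point; a minimal sketch, assuming the command module is registered under the (hypothetical) name fake_agents:

# run the demo-data seeder programmatically, e.g. from a shell or a test fixture
from django.core.management import call_command

call_command("fake_agents")  # "fake_agents" is an assumed command name, not confirmed by this diff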
25  api/tacticalrmm/agents/management/commands/update_agents.py  Normal file
@@ -0,0 +1,25 @@
from django.conf import settings
from django.core.management.base import BaseCommand
from packaging import version as pyver

from agents.models import Agent
from core.models import CoreSettings
from agents.tasks import send_agent_update_task
from tacticalrmm.utils import AGENT_DEFER


class Command(BaseCommand):
    help = "Triggers an agent update task to run"

    def handle(self, *args, **kwargs):
        core = CoreSettings.objects.first()
        if not core.agent_auto_update:  # type: ignore
            return

        q = Agent.objects.defer(*AGENT_DEFER).exclude(version=settings.LATEST_AGENT_VER)
        agent_ids: list[str] = [
            i.agent_id
            for i in q
            if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
        ]
        send_agent_update_task.delay(agent_ids=agent_ids)
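The pyver.parse comparison above is what keeps already-current (or ahead-of-schedule) agents out of the update queue. A quick illustrative sketch of the same semantic-version filtering, using only the packaging library (the version strings below are placeholders):

from packaging import version as pyver

LATEST_AGENT_VER = "1.7.0"  # placeholder value for illustration
installed = ["1.6.2", "1.7.0", "1.8.0"]

# only strictly older versions are queued for an update
needs_update = [v for v in installed if pyver.parse(v) < pyver.parse(LATEST_AGENT_VER)]
print(needs_update)  # ['1.6.2']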
23  api/tacticalrmm/agents/migrations/0037_auto_20210627_0014.py  Normal file
@@ -0,0 +1,23 @@
# Generated by Django 3.2.4 on 2021-06-27 00:14

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0036_agent_block_policy_inheritance'),
    ]

    operations = [
        migrations.AddField(
            model_name='agent',
            name='has_patches_pending',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='agent',
            name='pending_actions_count',
            field=models.PositiveIntegerField(default=0),
        ),
    ]

27  api/tacticalrmm/agents/migrations/0038_agenthistory.py  Normal file
@@ -0,0 +1,27 @@
# Generated by Django 3.2.1 on 2021-07-06 02:01

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0037_auto_20210627_0014'),
    ]

    operations = [
        migrations.CreateModel(
            name='AgentHistory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.DateTimeField(auto_now_add=True)),
                ('type', models.CharField(choices=[('task_run', 'Task Run'), ('script_run', 'Script Run'), ('cmd_run', 'CMD Run')], default='cmd_run', max_length=50)),
                ('command', models.TextField(blank=True, null=True)),
                ('status', models.CharField(choices=[('success', 'Success'), ('failure', 'Failure')], default='success', max_length=50)),
                ('username', models.CharField(default='system', max_length=50)),
                ('results', models.TextField(blank=True, null=True)),
                ('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to='agents.agent')),
            ],
        ),
    ]

25  api/tacticalrmm/agents/migrations/0039_auto_20210714_0738.py  Normal file
@@ -0,0 +1,25 @@
# Generated by Django 3.2.5 on 2021-07-14 07:38

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('scripts', '0008_script_guid'),
        ('agents', '0038_agenthistory'),
    ]

    operations = [
        migrations.AddField(
            model_name='agenthistory',
            name='script',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='history', to='scripts.script'),
        ),
        migrations.AddField(
            model_name='agenthistory',
            name='script_results',
            field=models.JSONField(blank=True, null=True),
        ),
    ]

28  api/tacticalrmm/agents/migrations/0040_auto_20211010_0249.py  Normal file
@@ -0,0 +1,28 @@
# Generated by Django 3.2.6 on 2021-10-10 02:49

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0039_auto_20210714_0738'),
    ]

    operations = [
        migrations.AlterField(
            model_name='agent',
            name='agent_id',
            field=models.CharField(max_length=200, unique=True),
        ),
        migrations.AlterField(
            model_name='agent',
            name='created_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='agent',
            name='modified_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]

@@ -0,0 +1,18 @@
# Generated by Django 3.2.6 on 2021-10-18 03:04

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0040_auto_20211010_0249'),
    ]

    operations = [
        migrations.AlterField(
            model_name='agenthistory',
            name='username',
            field=models.CharField(default='system', max_length=255),
        ),
    ]
File diff suppressed because one or more lines are too long
@@ -16,17 +16,17 @@ from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from nats.aio.client import Client as NATS
|
||||
from nats.aio.errors import ErrTimeout
|
||||
|
||||
from core.models import TZ_CHOICES, CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
|
||||
|
||||
class Agent(BaseAuditModel):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
version = models.CharField(default="0.1.0", max_length=255)
|
||||
salt_ver = models.CharField(default="1.0.3", max_length=255)
|
||||
operating_system = models.CharField(null=True, blank=True, max_length=255)
|
||||
@@ -35,7 +35,7 @@ class Agent(BaseAuditModel):
|
||||
hostname = models.CharField(max_length=255)
|
||||
salt_id = models.CharField(null=True, blank=True, max_length=255)
|
||||
local_ip = models.TextField(null=True, blank=True) # deprecated
|
||||
agent_id = models.CharField(max_length=200)
|
||||
agent_id = models.CharField(max_length=200, unique=True)
|
||||
last_seen = models.DateTimeField(null=True, blank=True)
|
||||
services = models.JSONField(null=True, blank=True)
|
||||
public_ip = models.CharField(null=True, max_length=255)
|
||||
@@ -64,6 +64,8 @@ class Agent(BaseAuditModel):
|
||||
)
|
||||
maintenance_mode = models.BooleanField(default=False)
|
||||
block_policy_inheritance = models.BooleanField(default=False)
|
||||
pending_actions_count = models.PositiveIntegerField(default=0)
|
||||
has_patches_pending = models.BooleanField(default=False)
|
||||
alert_template = models.ForeignKey(
|
||||
"alerts.AlertTemplate",
|
||||
related_name="agents",
|
||||
@@ -87,22 +89,24 @@ class Agent(BaseAuditModel):
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old agent if exists
|
||||
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
old_agent = Agent.objects.get(pk=self.pk) if self.pk else None
|
||||
super(Agent, self).save(old_model=old_agent, *args, **kwargs)
|
||||
|
||||
# check if new agent has been created
|
||||
# or check if policy have changed on agent
|
||||
# or if site has changed on agent and if so generate-policies
|
||||
# or if site has changed on agent and if so generate policies
|
||||
# or if agent was changed from server or workstation
|
||||
if (
|
||||
not old_agent
|
||||
or (old_agent and old_agent.policy != self.policy)
|
||||
or (old_agent.site != self.site)
|
||||
or (old_agent.monitoring_type != self.monitoring_type)
|
||||
or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
self.generate_checks_from_policies()
|
||||
self.generate_tasks_from_policies()
|
||||
generate_agent_checks_task.delay(agents=[self.pk], create_tasks=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.hostname
|
||||
@@ -119,7 +123,7 @@ class Agent(BaseAuditModel):
|
||||
else:
|
||||
from core.models import CoreSettings
|
||||
|
||||
return CoreSettings.objects.first().default_time_zone
|
||||
return CoreSettings.objects.first().default_time_zone # type: ignore
|
||||
|
||||
@property
|
||||
def arch(self):
|
||||
@@ -161,10 +165,6 @@ class Agent(BaseAuditModel):
|
||||
else:
|
||||
return "offline"
|
||||
|
||||
@property
|
||||
def has_patches_pending(self):
|
||||
return self.winupdates.filter(action="approve").filter(installed=False).exists() # type: ignore
|
||||
|
||||
@property
|
||||
def checks(self):
|
||||
total, passing, failing, warning, info = 0, 0, 0, 0, 0
|
||||
@@ -263,6 +263,11 @@ class Agent(BaseAuditModel):
|
||||
make = [x["Manufacturer"] for x in mobo if "Manufacturer" in x][0]
|
||||
model = [x["Product"] for x in mobo if "Product" in x][0]
|
||||
|
||||
if make.lower() == "lenovo":
|
||||
sysfam = [x["SystemFamily"] for x in comp_sys if "SystemFamily" in x][0]
|
||||
if "to be filled" not in sysfam.lower():
|
||||
model = sysfam
|
||||
|
||||
return f"{make} {model}"
|
||||
except:
|
||||
pass
|
||||
@@ -320,6 +325,7 @@ class Agent(BaseAuditModel):
|
||||
full: bool = False,
|
||||
wait: bool = False,
|
||||
run_on_any: bool = False,
|
||||
history_pk: int = 0,
|
||||
) -> Any:
|
||||
|
||||
from scripts.models import Script
|
||||
@@ -338,6 +344,9 @@ class Agent(BaseAuditModel):
|
||||
},
|
||||
}
|
||||
|
||||
if history_pk != 0:
|
||||
data["id"] = history_pk
|
||||
|
||||
running_agent = self
|
||||
if run_on_any:
|
||||
nats_ping = {"func": "ping"}
|
||||
@@ -440,8 +449,8 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# if patch policy still doesn't exist check default policy
|
||||
elif (
|
||||
core_settings.server_policy
|
||||
and core_settings.server_policy.winupdatepolicy.exists()
|
||||
core_settings.server_policy # type: ignore
|
||||
and core_settings.server_policy.winupdatepolicy.exists() # type: ignore
|
||||
):
|
||||
# make sure agent site and client are not blocking inheritance
|
||||
if (
|
||||
@@ -449,7 +458,7 @@ class Agent(BaseAuditModel):
|
||||
and not site.block_policy_inheritance
|
||||
and not site.client.block_policy_inheritance
|
||||
):
|
||||
patch_policy = core_settings.server_policy.winupdatepolicy.get()
|
||||
patch_policy = core_settings.server_policy.winupdatepolicy.get() # type: ignore
|
||||
|
||||
elif self.monitoring_type == "workstation":
|
||||
# check agent policy first which should override client or site policy
|
||||
@@ -478,8 +487,8 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# if patch policy still doesn't exist check default policy
|
||||
elif (
|
||||
core_settings.workstation_policy
|
||||
and core_settings.workstation_policy.winupdatepolicy.exists()
|
||||
core_settings.workstation_policy # type: ignore
|
||||
and core_settings.workstation_policy.winupdatepolicy.exists() # type: ignore
|
||||
):
|
||||
# make sure agent site and client are not blocking inheritance
|
||||
if (
|
||||
@@ -488,7 +497,7 @@ class Agent(BaseAuditModel):
|
||||
and not site.client.block_policy_inheritance
|
||||
):
|
||||
patch_policy = (
|
||||
core_settings.workstation_policy.winupdatepolicy.get()
|
||||
core_settings.workstation_policy.winupdatepolicy.get() # type: ignore
|
||||
)
|
||||
|
||||
# if policy still doesn't exist return the agent patch policy
|
||||
@@ -603,35 +612,35 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# check if alert template is applied globally and return
|
||||
if (
|
||||
core.alert_template
|
||||
and core.alert_template.is_active
|
||||
core.alert_template # type: ignore
|
||||
and core.alert_template.is_active # type: ignore
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.alert_template)
|
||||
templates.append(core.alert_template) # type: ignore
|
||||
|
||||
# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
|
||||
if (
|
||||
self.monitoring_type == "server"
|
||||
and core.server_policy
|
||||
and core.server_policy.alert_template
|
||||
and core.server_policy.alert_template.is_active
|
||||
and core.server_policy # type: ignore
|
||||
and core.server_policy.alert_template # type: ignore
|
||||
and core.server_policy.alert_template.is_active # type: ignore
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.server_policy.alert_template)
|
||||
templates.append(core.server_policy.alert_template) # type: ignore
|
||||
if (
|
||||
self.monitoring_type == "workstation"
|
||||
and core.workstation_policy
|
||||
and core.workstation_policy.alert_template
|
||||
and core.workstation_policy.alert_template.is_active
|
||||
and core.workstation_policy # type: ignore
|
||||
and core.workstation_policy.alert_template # type: ignore
|
||||
and core.workstation_policy.alert_template.is_active # type: ignore
|
||||
and not self.block_policy_inheritance
|
||||
and not site.block_policy_inheritance
|
||||
and not client.block_policy_inheritance
|
||||
):
|
||||
templates.append(core.workstation_policy.alert_template)
|
||||
templates.append(core.workstation_policy.alert_template) # type: ignore
|
||||
|
||||
# go through the templates and return the first one that isn't excluded
|
||||
for template in templates:
|
||||
@@ -692,7 +701,7 @@ class Agent(BaseAuditModel):
|
||||
key1 = key[0:48]
|
||||
key2 = key[48:]
|
||||
msg = '{{"a":{}, "u":"{}","time":{}}}'.format(
|
||||
action, user, int(time.time())
|
||||
action, user.lower(), int(time.time())
|
||||
)
|
||||
iv = get_random_bytes(16)
|
||||
|
||||
@@ -734,8 +743,8 @@ class Agent(BaseAuditModel):
|
||||
try:
|
||||
ret = msgpack.loads(msg.data) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
ret = str(e)
|
||||
DebugLog.error(agent=self, log_type="agent_issues", message=ret)
|
||||
|
||||
await nc.close()
|
||||
return ret
|
||||
@@ -747,12 +756,9 @@ class Agent(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(agent):
|
||||
# serializes the agent and returns json
|
||||
from .serializers import AgentEditSerializer
|
||||
from .serializers import AgentAuditSerializer
|
||||
|
||||
ret = AgentEditSerializer(agent).data
|
||||
del ret["all_timezones"]
|
||||
del ret["client"]
|
||||
return ret
|
||||
return AgentAuditSerializer(agent).data
|
||||
|
||||
def delete_superseded_updates(self):
|
||||
try:
|
||||
@@ -767,7 +773,7 @@ class Agent(BaseAuditModel):
|
||||
# skip if no version info is available therefore nothing to parse
|
||||
try:
|
||||
vers = [
|
||||
re.search(r"\(Version(.*?)\)", i).group(1).strip()
|
||||
re.search(r"\(Version(.*?)\)", i).group(1).strip() # type: ignore
|
||||
for i in titles
|
||||
]
|
||||
sorted_vers = sorted(vers, key=LooseVersion)
|
||||
@@ -802,7 +808,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_mail(
|
||||
CORE.send_mail( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||
(
|
||||
f"Data has not been received from client {self.client.name}, "
|
||||
@@ -817,7 +823,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_mail(
|
||||
CORE.send_mail( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||
(
|
||||
f"Data has been received from client {self.client.name}, "
|
||||
@@ -832,7 +838,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_sms(
|
||||
CORE.send_sms( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||
alert_template=self.alert_template,
|
||||
)
|
||||
@@ -841,7 +847,7 @@ class Agent(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_sms(
|
||||
CORE.send_sms( # type: ignore
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||
alert_template=self.alert_template,
|
||||
)
|
||||
@@ -857,6 +863,8 @@ RECOVERY_CHOICES = [
|
||||
|
||||
|
||||
class RecoveryAction(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="recoveryactions",
|
||||
@@ -871,6 +879,8 @@ class RecoveryAction(models.Model):
|
||||
|
||||
|
||||
class Note(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="notes",
|
||||
@@ -891,6 +901,8 @@ class Note(models.Model):
|
||||
|
||||
|
||||
class AgentCustomField(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="custom_fields",
|
||||
@@ -913,7 +925,7 @@ class AgentCustomField(models.Model):
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.field
|
||||
return self.field.name
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
@@ -923,3 +935,59 @@ class AgentCustomField(models.Model):
            return self.bool_value
        else:
            return self.string_value

    def save_to_field(self, value):
        if self.field.type in [
            "text",
            "number",
            "single",
            "datetime",
        ]:
            self.string_value = value
            self.save()
        elif self.field.type == "multiple":
            self.multiple_value = value.split(",")
            self.save()
        elif self.field.type == "checkbox":
            self.bool_value = bool(value)
            self.save()

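A brief usage sketch of save_to_field; the row fetched here is a placeholder and the surrounding call sites are not part of this hunk:

# illustrative only: assumes an existing AgentCustomField row whose CustomField type is "multiple"
from agents.models import AgentCustomField

cf = AgentCustomField.objects.select_related("field").first()
if cf and cf.field.type == "multiple":
    cf.save_to_field("tag1,tag2,tag3")  # split on commas and stored in multiple_value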
AGENT_HISTORY_TYPES = (
    ("task_run", "Task Run"),
    ("script_run", "Script Run"),
    ("cmd_run", "CMD Run"),
)

AGENT_HISTORY_STATUS = (("success", "Success"), ("failure", "Failure"))


class AgentHistory(models.Model):
    objects = PermissionQuerySet.as_manager()

    agent = models.ForeignKey(
        Agent,
        related_name="history",
        on_delete=models.CASCADE,
    )
    time = models.DateTimeField(auto_now_add=True)
    type = models.CharField(
        max_length=50, choices=AGENT_HISTORY_TYPES, default="cmd_run"
    )
    command = models.TextField(null=True, blank=True)
    status = models.CharField(
        max_length=50, choices=AGENT_HISTORY_STATUS, default="success"
    )
    username = models.CharField(max_length=255, default="system")
    results = models.TextField(null=True, blank=True)
    script = models.ForeignKey(
        "scripts.Script",
        null=True,
        blank=True,
        related_name="history",
        on_delete=models.SET_NULL,
    )
    script_results = models.JSONField(null=True, blank=True)

    def __str__(self):
        return f"{self.agent.hostname} - {self.type}"

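The new AgentHistory model is what the history_pk plumbing in run_script ultimately writes into. A minimal sketch of recording a command run for an agent, using only the fields defined above (the trigger point and the actual output are assumptions, not shown in this hunk):

from agents.models import Agent, AgentHistory

agent = Agent.objects.first()  # placeholder lookup for illustration
if agent:
    hist = AgentHistory.objects.create(
        agent=agent,
        type="cmd_run",
        command="ipconfig /all",
        username="tactical",  # whoever triggered the run
    )
    # later, once the agent returns output:
    hist.results = "Windows IP Configuration ..."
    hist.save(update_fields=["results"])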
123  api/tacticalrmm/agents/permissions.py  Normal file
@@ -0,0 +1,123 @@
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm, _has_perm_on_agent


class AgentPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET":
            if "agent_id" in view.kwargs.keys():
                return _has_perm(r, "can_list_agents") and _has_perm_on_agent(
                    r.user, view.kwargs["agent_id"]
                )
            else:
                return _has_perm(r, "can_list_agents")
        elif r.method == "DELETE":
            return _has_perm(r, "can_uninstall_agents") and _has_perm_on_agent(
                r.user, view.kwargs["agent_id"]
            )
        else:
            if r.path == "/agents/maintenance/bulk/":
                return _has_perm(r, "can_edit_agent")
            else:
                return _has_perm(r, "can_edit_agent") and _has_perm_on_agent(
                    r.user, view.kwargs["agent_id"]
                )


class RecoverAgentPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_recover_agents") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class MeshPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_use_mesh") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class UpdateAgentPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_update_agents")


class PingAgentPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_ping_agents") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class ManageProcPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_manage_procs") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class EvtLogPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_view_eventlogs") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class SendCMDPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_send_cmd") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class RebootAgentPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_reboot_agents") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class InstallAgentPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_install_agents")


class RunScriptPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_run_scripts") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )


class AgentNotesPerms(permissions.BasePermission):
    def has_permission(self, r, view):

        # permissions for GET /agents/notes/ endpoint
        if r.method == "GET":

            # permissions for /agents/<agent_id>/notes endpoint
            if "agent_id" in view.kwargs.keys():
                return _has_perm(r, "can_list_notes") and _has_perm_on_agent(
                    r.user, view.kwargs["agent_id"]
                )
            else:
                return _has_perm(r, "can_list_notes")
        else:
            return _has_perm(r, "can_manage_notes")


class RunBulkPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_run_bulk")


class AgentHistoryPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if "agent_id" in view.kwargs.keys():
            return _has_perm(r, "can_list_agent_history") and _has_perm_on_agent(
                r.user, view.kwargs["agent_id"]
            )
        else:
            return _has_perm(r, "can_list_agent_history")
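These permission classes plug into DRF views in the usual way via permission_classes; a small sketch of how one of them would typically be wired up (the view itself is hypothetical and not part of this diff; the real views live in agents/views.py):

from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from agents.permissions import AgentHistoryPerms


class ExampleAgentHistoryView(APIView):
    # hypothetical view for illustration only
    permission_classes = [IsAuthenticated, AgentHistoryPerms]

    def get(self, request, agent_id=None):
        # AgentHistoryPerms has already enforced can_list_agent_history
        # (plus per-agent access when agent_id is present in the URL kwargs)
        return Response({"ok": True})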
@@ -1,15 +1,30 @@
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from clients.serializers import ClientSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent, AgentCustomField, Note
|
||||
from .models import Agent, AgentCustomField, Note, AgentHistory
|
||||
|
||||
|
||||
class AgentCustomFieldSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AgentCustomField
|
||||
fields = (
|
||||
"id",
|
||||
"field",
|
||||
"agent",
|
||||
"value",
|
||||
"string_value",
|
||||
"bool_value",
|
||||
"multiple_value",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"string_value": {"write_only": True},
|
||||
"bool_value": {"write_only": True},
|
||||
"multiple_value": {"write_only": True},
|
||||
}
|
||||
|
||||
|
||||
class AgentSerializer(serializers.ModelSerializer):
|
||||
# for vue
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
status = serializers.ReadOnlyField()
|
||||
cpu_model = serializers.ReadOnlyField()
|
||||
@@ -20,33 +35,21 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
checks = serializers.ReadOnlyField()
|
||||
timezone = serializers.ReadOnlyField()
|
||||
all_timezones = serializers.SerializerMethodField()
|
||||
client_name = serializers.ReadOnlyField(source="client.name")
|
||||
client = serializers.ReadOnlyField(source="client.name")
|
||||
site_name = serializers.ReadOnlyField(source="site.name")
|
||||
custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
|
||||
patches_last_installed = serializers.ReadOnlyField()
|
||||
last_seen = serializers.ReadOnlyField()
|
||||
|
||||
def get_all_timezones(self, obj):
|
||||
return pytz.all_timezones
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
exclude = [
|
||||
"last_seen",
|
||||
]
|
||||
|
||||
|
||||
class AgentOverdueActionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = [
|
||||
"pk",
|
||||
"overdue_email_alert",
|
||||
"overdue_text_alert",
|
||||
"overdue_dashboard_alert",
|
||||
]
|
||||
exclude = ["id"]
|
||||
|
||||
|
||||
class AgentTableSerializer(serializers.ModelSerializer):
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
pending_actions = serializers.SerializerMethodField()
|
||||
status = serializers.ReadOnlyField()
|
||||
checks = serializers.ReadOnlyField()
|
||||
last_seen = serializers.SerializerMethodField()
|
||||
@@ -69,9 +72,6 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
"always_alert": obj.alert_template.agent_always_alert,
|
||||
}
|
||||
|
||||
def get_pending_actions(self, obj):
|
||||
return obj.pendingactions.filter(status="pending").count()
|
||||
|
||||
def get_last_seen(self, obj) -> str:
|
||||
if obj.time_zone is not None:
|
||||
agent_tz = pytz.timezone(obj.time_zone)
|
||||
@@ -94,17 +94,16 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = [
|
||||
"id",
|
||||
"agent_id",
|
||||
"alert_template",
|
||||
"hostname",
|
||||
"agent_id",
|
||||
"site_name",
|
||||
"client_name",
|
||||
"monitoring_type",
|
||||
"description",
|
||||
"needs_reboot",
|
||||
"patches_pending",
|
||||
"pending_actions",
|
||||
"has_patches_pending",
|
||||
"pending_actions_count",
|
||||
"status",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
@@ -121,63 +120,7 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
depth = 2
|
||||
|
||||
|
||||
class AgentCustomFieldSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AgentCustomField
|
||||
fields = (
|
||||
"id",
|
||||
"field",
|
||||
"agent",
|
||||
"value",
|
||||
"string_value",
|
||||
"bool_value",
|
||||
"multiple_value",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"string_value": {"write_only": True},
|
||||
"bool_value": {"write_only": True},
|
||||
"multiple_value": {"write_only": True},
|
||||
}
|
||||
|
||||
|
||||
class AgentEditSerializer(serializers.ModelSerializer):
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
all_timezones = serializers.SerializerMethodField()
|
||||
client = ClientSerializer(read_only=True)
|
||||
custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
|
||||
|
||||
def get_all_timezones(self, obj):
|
||||
return pytz.all_timezones
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = [
|
||||
"id",
|
||||
"hostname",
|
||||
"client",
|
||||
"site",
|
||||
"monitoring_type",
|
||||
"description",
|
||||
"time_zone",
|
||||
"timezone",
|
||||
"check_interval",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"all_timezones",
|
||||
"winupdatepolicy",
|
||||
"policy",
|
||||
"custom_fields",
|
||||
]
|
||||
|
||||
|
||||
class WinAgentSerializer(serializers.ModelSerializer):
|
||||
# for the windows agent
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
status = serializers.ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = "__all__"
|
||||
@@ -190,24 +133,38 @@ class AgentHostnameSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = (
|
||||
"id",
|
||||
"hostname",
|
||||
"pk",
|
||||
"agent_id",
|
||||
"client",
|
||||
"site",
|
||||
)
|
||||
|
||||
|
||||
class NoteSerializer(serializers.ModelSerializer):
|
||||
class AgentNoteSerializer(serializers.ModelSerializer):
|
||||
username = serializers.ReadOnlyField(source="user.username")
|
||||
agent_id = serializers.ReadOnlyField(source="agent.agent_id")
|
||||
|
||||
class Meta:
|
||||
model = Note
|
||||
fields = "__all__"
|
||||
fields = ("pk", "entry_time", "agent", "user", "note", "username", "agent_id")
|
||||
extra_kwargs = {"agent": {"write_only": True}, "user": {"write_only": True}}
|
||||
|
||||
|
||||
class NotesSerializer(serializers.ModelSerializer):
|
||||
notes = NoteSerializer(many=True, read_only=True)
|
||||
class AgentHistorySerializer(serializers.ModelSerializer):
|
||||
time = serializers.SerializerMethodField(read_only=True)
|
||||
script_name = serializers.ReadOnlyField(source="script.name")
|
||||
|
||||
class Meta:
|
||||
model = AgentHistory
|
||||
fields = "__all__"
|
||||
|
||||
def get_time(self, history):
|
||||
tz = self.context["default_tz"]
|
||||
return history.time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")
|
||||
|
||||
|
||||
class AgentAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = ["hostname", "pk", "notes"]
|
||||
exclude = ["disks", "services", "wmi_detail"]
|
||||
|
||||
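The new AgentHistorySerializer formats its time field through a timezone passed in via serializer context. A hedged usage sketch (field and context names come from the diff; the agent_id value and the surrounding view code are assumptions):

import pytz

from agents.models import AgentHistory
from agents.serializers import AgentHistorySerializer

# the calling view is expected to supply the display timezone via context
tz = pytz.timezone("America/Los_Angeles")  # placeholder timezone
history = AgentHistory.objects.filter(agent__agent_id="some-agent-id")  # placeholder id
data = AgentHistorySerializer(history, many=True, context={"default_tz": tz}).data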
@@ -1,66 +1,58 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import random
|
||||
import urllib.parse
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
from core.models import CoreSettings
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from logs.models import DebugLog, PendingAction
|
||||
from packaging import version as pyver
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CodeSignToken, CoreSettings
|
||||
from logs.models import PendingAction
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.celery import app
|
||||
from tacticalrmm.utils import run_nats_api_cmd
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
from agents.models import Agent
|
||||
from agents.utils import get_winagent_url
|
||||
|
||||
|
||||
def agent_update(pk: int, codesigntoken: str = None) -> str:
|
||||
from agents.utils import get_exegen_url
|
||||
def agent_update(agent_id: str, force: bool = False) -> str:
|
||||
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
agent = Agent.objects.get(agent_id=agent_id)
|
||||
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.3.0"):
|
||||
return "not supported"
|
||||
|
||||
# skip if we can't determine the arch
|
||||
if agent.arch is None:
|
||||
logger.warning(
|
||||
f"Unable to determine arch on {agent.hostname}. Skipping agent update."
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to determine arch on {agent.hostname}({agent.agent_id}). Skipping agent update.",
|
||||
)
|
||||
return "noarch"
|
||||
|
||||
version = settings.LATEST_AGENT_VER
|
||||
inno = agent.win_inno_exe
|
||||
url = get_winagent_url(agent.arch)
|
||||
|
||||
if codesigntoken is not None and pyver.parse(version) >= pyver.parse("1.5.0"):
|
||||
base_url = get_exegen_url() + "/api/v1/winagents/?"
|
||||
params = {"version": version, "arch": agent.arch, "token": codesigntoken}
|
||||
url = base_url + urllib.parse.urlencode(params)
|
||||
else:
|
||||
url = agent.winagent_dl
|
||||
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
agent.pendingactions.filter(
|
||||
if not force:
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).delete()
|
||||
).exists():
|
||||
agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).delete()
|
||||
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
)
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "agentupdate",
|
||||
@@ -75,16 +67,21 @@ def agent_update(pk: int, codesigntoken: str = None) -> str:
|
||||
|
||||
|
||||
@app.task
|
||||
def send_agent_update_task(pks: list[int]) -> None:
|
||||
try:
|
||||
codesigntoken = CodeSignToken.objects.first().token
|
||||
except:
|
||||
codesigntoken = None
|
||||
|
||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||
def force_code_sign(agent_ids: list[str]) -> None:
|
||||
chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent_update(pk, codesigntoken)
|
||||
for agent_id in chunk:
|
||||
agent_update(agent_id=agent_id, force=True)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
|
||||
@app.task
|
||||
def send_agent_update_task(agent_ids: list[str]) -> None:
|
||||
chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
|
||||
for chunk in chunks:
|
||||
for agent_id in chunk:
|
||||
agent_update(agent_id)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
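Both force_code_sign and send_agent_update_task use the same chunk-then-throttle pattern: slice the id list into batches of 50, pause briefly between agents and longer between batches. A standalone sketch of that pattern with a stubbed worker so it runs on its own (timings mirror the task code above):

from time import sleep


def dispatch_in_chunks(ids: list[str], chunk_size: int = 50) -> None:
    # generator of fixed-size slices, same shape as the task code above
    chunks = (ids[i : i + chunk_size] for i in range(0, len(ids), chunk_size))
    for chunk in chunks:
        for item in chunk:
            print(f"would update {item}")  # stand-in for agent_update(item)
            sleep(0.05)  # small gap between agents
        sleep(4)  # longer gap between batches to avoid hammering the queue


dispatch_in_chunks([f"agent-{n}" for n in range(120)])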
@@ -92,25 +89,20 @@ def send_agent_update_task(pks: list[int]) -> None:
|
||||
@app.task
|
||||
def auto_self_agent_update_task() -> None:
|
||||
core = CoreSettings.objects.first()
|
||||
if not core.agent_auto_update:
|
||||
if not core.agent_auto_update: # type:ignore
|
||||
return
|
||||
|
||||
try:
|
||||
codesigntoken = CodeSignToken.objects.first().token
|
||||
except:
|
||||
codesigntoken = None
|
||||
|
||||
q = Agent.objects.only("pk", "version")
|
||||
pks: list[int] = [
|
||||
i.pk
|
||||
q = Agent.objects.only("agent_id", "version")
|
||||
agent_ids: list[str] = [
|
||||
i.agent_id
|
||||
for i in q
|
||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
]
|
||||
|
||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||
chunks = (agent_ids[i : i + 30] for i in range(0, len(agent_ids), 30))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent_update(pk, codesigntoken)
|
||||
for agent_id in chunk:
|
||||
agent_update(agent_id)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
@@ -195,6 +187,7 @@ def agent_outages_task() -> None:
|
||||
|
||||
agents = Agent.objects.only(
|
||||
"pk",
|
||||
"agent_id",
|
||||
"last_seen",
|
||||
"offline_time",
|
||||
"overdue_time",
|
||||
@@ -215,14 +208,24 @@ def run_script_email_results_task(
|
||||
nats_timeout: int,
|
||||
emails: list[str],
|
||||
args: list[str] = [],
|
||||
history_pk: int = 0,
|
||||
):
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
r = agent.run_script(
|
||||
scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
|
||||
scriptpk=script.pk,
|
||||
args=args,
|
||||
full=True,
|
||||
timeout=nats_timeout,
|
||||
wait=True,
|
||||
history_pk=history_pk,
|
||||
)
|
||||
if r == "timeout":
|
||||
logger.error(f"{agent.hostname} timed out running script.")
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"{agent.hostname}({agent.pk}) timed out running script.",
|
||||
)
|
||||
return
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
@@ -238,43 +241,68 @@ def run_script_email_results_task(
|
||||
|
||||
msg = EmailMessage()
|
||||
msg["Subject"] = subject
|
||||
msg["From"] = CORE.smtp_from_email
|
||||
msg["From"] = CORE.smtp_from_email # type:ignore
|
||||
|
||||
if emails:
|
||||
msg["To"] = ", ".join(emails)
|
||||
else:
|
||||
msg["To"] = ", ".join(CORE.email_alert_recipients)
|
||||
msg["To"] = ", ".join(CORE.email_alert_recipients) # type:ignore
|
||||
|
||||
msg.set_content(body)
|
||||
|
||||
try:
|
||||
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
|
||||
if CORE.smtp_requires_auth:
|
||||
with smtplib.SMTP(
|
||||
CORE.smtp_host, CORE.smtp_port, timeout=20 # type:ignore
|
||||
) as server: # type:ignore
|
||||
if CORE.smtp_requires_auth: # type:ignore
|
||||
server.ehlo()
|
||||
server.starttls()
|
||||
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
|
||||
server.login(
|
||||
CORE.smtp_host_user, CORE.smtp_host_password # type:ignore
|
||||
) # type:ignore
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
else:
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(message=str(e))
|
||||
|
||||
|
||||
@app.task
|
||||
def monitor_agents_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
def clear_faults_task(older_than_days: int) -> None:
|
||||
# https://github.com/wh1te909/tacticalrmm/issues/484
|
||||
agents = Agent.objects.exclude(last_seen__isnull=True).filter(
|
||||
last_seen__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
)
|
||||
ids = [i.agent_id for i in agents if i.status != "online"]
|
||||
run_nats_api_cmd("monitor", ids)
|
||||
for agent in agents:
|
||||
if agent.agentchecks.exists():
|
||||
for check in agent.agentchecks.all():
|
||||
# reset check status
|
||||
check.status = "passing"
|
||||
check.save(update_fields=["status"])
|
||||
if check.alert.filter(resolved=False).exists():
|
||||
check.alert.get(resolved=False).resolve()
|
||||
|
||||
# reset overdue alerts
|
||||
agent.overdue_email_alert = False
|
||||
agent.overdue_text_alert = False
|
||||
agent.overdue_dashboard_alert = False
|
||||
agent.save(
|
||||
update_fields=[
|
||||
"overdue_email_alert",
|
||||
"overdue_text_alert",
|
||||
"overdue_dashboard_alert",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@app.task
|
||||
def get_wmi_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
ids = [i.agent_id for i in agents if i.status == "online"]
|
||||
run_nats_api_cmd("wmi", ids)
|
||||
def prune_agent_history(older_than_days: int) -> str:
|
||||
from .models import AgentHistory
|
||||
|
||||
AgentHistory.objects.filter(
|
||||
time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
|
||||
return "ok"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,32 +1,44 @@
from django.urls import path

from . import views
from checks.views import GetAddChecks
from autotasks.views import GetAddAutoTasks
from logs.views import PendingActions

urlpatterns = [
    path("listagents/", views.AgentsTableList.as_view()),
    path("listagentsnodetail/", views.list_agents_no_detail),
    path("<int:pk>/agenteditdetails/", views.agent_edit_details),
    path("overdueaction/", views.overdue_action),
    path("sendrawcmd/", views.send_raw_cmd),
    path("<pk>/agentdetail/", views.agent_detail),
    path("<int:pk>/meshcentral/", views.meshcentral),
    # agent views
    path("", views.GetAgents.as_view()),
    path("<agent:agent_id>/", views.GetUpdateDeleteAgent.as_view()),
    path("<agent:agent_id>/cmd/", views.send_raw_cmd),
    path("<agent:agent_id>/runscript/", views.run_script),
    path("<agent:agent_id>/wmi/", views.WMI.as_view()),
    path("<agent:agent_id>/recover/", views.recover),
    path("<agent:agent_id>/reboot/", views.Reboot.as_view()),
    path("<agent:agent_id>/ping/", views.ping),
    # alias for checks get view
    path("<agent:agent_id>/checks/", GetAddChecks.as_view()),
    # alias for autotasks get view
    path("<agent:agent_id>/tasks/", GetAddAutoTasks.as_view()),
    # alias for pending actions get view
    path("<agent:agent_id>/pendingactions/", PendingActions.as_view()),
    # agent remote background
    path("<agent:agent_id>/meshcentral/", views.AgentMeshCentral.as_view()),
    path("<agent:agent_id>/meshcentral/recover/", views.AgentMeshCentral.as_view()),
    path("<agent:agent_id>/processes/", views.AgentProcesses.as_view()),
    path("<agent:agent_id>/processes/<int:pid>/", views.AgentProcesses.as_view()),
    path("<agent:agent_id>/eventlog/<str:logtype>/<int:days>/", views.get_event_log),
    # agent history
    path("history/", views.AgentHistoryView.as_view()),
    path("<agent:agent_id>/history/", views.AgentHistoryView.as_view()),
    # agent notes
    path("notes/", views.GetAddNotes.as_view()),
    path("notes/<int:pk>/", views.GetEditDeleteNote.as_view()),
    path("<agent:agent_id>/notes/", views.GetAddNotes.as_view()),
    # bulk actions
    path("maintenance/bulk/", views.agent_maintenance),
    path("actions/bulk/", views.bulk),
    path("versions/", views.get_agent_versions),
    path("update/", views.update_agents),
    path("installer/", views.install_agent),
    path("<str:arch>/getmeshexe/", views.get_mesh_exe),
    path("uninstall/", views.uninstall),
    path("editagent/", views.edit_agent),
    path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
    path("getagentversions/", views.get_agent_versions),
    path("updateagents/", views.update_agents),
    path("<pk>/getprocs/", views.get_processes),
    path("<pk>/<pid>/killproc/", views.kill_proc),
    path("reboot/", views.Reboot.as_view()),
    path("installagent/", views.install_agent),
    path("<int:pk>/ping/", views.ping),
    path("recover/", views.recover),
    path("runscript/", views.run_script),
    path("<int:pk>/recovermesh/", views.recover_mesh),
    path("<int:pk>/notes/", views.GetAddNotes.as_view()),
    path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
    path("bulk/", views.bulk),
    path("maintenance/", views.agent_maintenance),
    path("<int:pk>/wmi/", views.WMI.as_view()),
]
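The new routes use an <agent:agent_id> converter rather than <int:pk>. The converter itself is not part of this hunk; registering a custom path converter in Django generally looks like the sketch below (the class name and regex here are assumptions, not taken from this diff):

# hypothetical sketch of a custom "agent" path converter registration
from django.urls import register_converter


class AgentIDConverter:
    # agent_id values are opaque strings, so accept anything up to the next slash
    regex = "[^/]+"

    def to_python(self, value: str) -> str:
        return value

    def to_url(self, value: str) -> str:
        return value


register_converter(AgentIDConverter, "agent")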
@@ -1,8 +1,9 @@
import random
import urllib.parse

import requests

from django.conf import settings
from core.models import CodeSignToken


def get_exegen_url() -> str:
@@ -20,18 +21,20 @@ def get_exegen_url() -> str:


def get_winagent_url(arch: str) -> str:
    from core.models import CodeSignToken

    dl_url = settings.DL_32 if arch == "32" else settings.DL_64

    try:
        codetoken = CodeSignToken.objects.first().token
        base_url = get_exegen_url() + "/api/v1/winagents/?"
        params = {
            "version": settings.LATEST_AGENT_VER,
            "arch": arch,
            "token": codetoken,
        }
        dl_url = base_url + urllib.parse.urlencode(params)
        t: CodeSignToken = CodeSignToken.objects.first()  # type: ignore
        if t.is_valid:
            base_url = get_exegen_url() + "/api/v1/winagents/?"
            params = {
                "version": settings.LATEST_AGENT_VER,
                "arch": arch,
                "token": t.token,
            }
            dl_url = base_url + urllib.parse.urlencode(params)
    except:
        dl_url = settings.DL_64 if arch == "64" else settings.DL_32
        pass

    return dl_url
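get_winagent_url falls back to the static download URLs unless a valid CodeSignToken exists, in which case it builds a query string against the exe generator. A quick sketch of the URL-building step on its own (all values below are placeholders):

import urllib.parse

base_url = "https://exe-gen.example.com/api/v1/winagents/?"  # placeholder host
params = {"version": "1.7.0", "arch": "64", "token": "REDACTED"}
dl_url = base_url + urllib.parse.urlencode(params)
# -> https://exe-gen.example.com/api/v1/winagents/?version=1.7.0&arch=64&token=REDACTED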
File diff suppressed because it is too large
Load Diff
33
api/tacticalrmm/alerts/migrations/0007_auto_20210721_0423.py
Normal file
33
api/tacticalrmm/alerts/migrations/0007_auto_20210721_0423.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 04:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0006_auto_20210217_1736'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='created_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='created_time',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='modified_by',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='modified_time',
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
28
api/tacticalrmm/alerts/migrations/0008_auto_20210721_1757.py
Normal file
28
api/tacticalrmm/alerts/migrations/0008_auto_20210721_1757.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 17:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0007_auto_20210721_0423'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='check_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='task_script_actions',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
28
api/tacticalrmm/alerts/migrations/0009_auto_20210721_1810.py
Normal file
28
api/tacticalrmm/alerts/migrations/0009_auto_20210721_1810.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.1 on 2021-07-21 18:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0008_auto_20210721_1757'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_script_actions',
|
||||
field=models.BooleanField(blank=True, default=True, null=True),
|
||||
),
|
||||
]
|
||||
23
api/tacticalrmm/alerts/migrations/0010_auto_20210917_1954.py
Normal file
23
api/tacticalrmm/alerts/migrations/0010_auto_20210917_1954.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-17 19:54
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("alerts", "0009_auto_20210721_1810"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="alerttemplate",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="alerttemplate",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
@@ -3,19 +3,19 @@ from __future__ import annotations
|
||||
import re
|
||||
from typing import TYPE_CHECKING, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
SEVERITY_CHOICES = [
|
||||
("info", "Informational"),
|
||||
@@ -32,6 +32,8 @@ ALERT_TYPE_CHOICES = [
|
||||
|
||||
|
||||
class Alert(models.Model):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="agent",
|
||||
@@ -173,6 +175,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.agent_always_email
|
||||
always_text = alert_template.agent_always_text
|
||||
alert_interval = alert_template.agent_periodic_alert_days
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_availability_alert(instance)
|
||||
@@ -209,6 +212,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.check_always_email
|
||||
always_text = alert_template.check_always_text
|
||||
alert_interval = alert_template.check_periodic_alert_days
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_check_alert(instance)
|
||||
@@ -242,6 +246,7 @@ class Alert(models.Model):
|
||||
always_email = alert_template.task_always_email
|
||||
always_text = alert_template.task_always_text
|
||||
alert_interval = alert_template.task_periodic_alert_days
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
if instance.should_create_alert(alert_template):
|
||||
alert = cls.create_or_return_task_alert(instance)
|
||||
@@ -295,7 +300,7 @@ class Alert(models.Model):
|
||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||
|
||||
# check if any scripts should be run
|
||||
if alert_template and alert_template.action and not alert.action_run:
|
||||
if alert_template and alert_template.action and run_script_action and not alert.action_run: # type: ignore
|
||||
r = agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert.parse_script_args(alert_template.action_args),
|
||||
@@ -314,8 +319,10 @@ class Alert(models.Model):
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -345,6 +352,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.agent_email_on_resolved
|
||||
text_on_resolved = alert_template.agent_text_on_resolved
|
||||
run_script_action = alert_template.agent_script_actions
|
||||
|
||||
elif isinstance(instance, Check):
|
||||
from checks.tasks import (
|
||||
@@ -363,6 +371,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.check_email_on_resolved
|
||||
text_on_resolved = alert_template.check_text_on_resolved
|
||||
run_script_action = alert_template.check_script_actions
|
||||
|
||||
elif isinstance(instance, AutomatedTask):
|
||||
from autotasks.tasks import (
|
||||
@@ -381,6 +390,7 @@ class Alert(models.Model):
|
||||
if alert_template:
|
||||
email_on_resolved = alert_template.task_email_on_resolved
|
||||
text_on_resolved = alert_template.task_text_on_resolved
|
||||
run_script_action = alert_template.task_script_actions
|
||||
|
||||
else:
|
||||
return
|
||||
@@ -403,6 +413,7 @@ class Alert(models.Model):
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and run_script_action # type: ignore
|
||||
and not alert.resolved_action_run
|
||||
):
|
||||
r = agent.run_script(
|
||||
@@ -425,8 +436,10 @@ class Alert(models.Model):
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="scripting",
|
||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||
)
|
||||
|
||||
def parse_script_args(self, args: list[str]):
|
||||
@@ -443,15 +456,16 @@ class Alert(models.Model):
|
||||
if match:
|
||||
name = match.group(1)
|
||||
|
||||
if hasattr(self, name):
|
||||
value = getattr(self, name)
|
||||
# check if attr exists and isn't a function
|
||||
if hasattr(self, name) and not callable(getattr(self, name)):
|
||||
value = f"'{getattr(self, name)}'"
|
||||
else:
|
||||
continue
|
||||
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)) # type: ignore
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
DebugLog.error(log_type="scripting", message=str(e))
|
||||
continue
|
||||
|
||||
else:
|
||||
@@ -460,7 +474,7 @@ class Alert(models.Model):
|
||||
return temp_args
|
||||
|
||||
|
||||
class AlertTemplate(models.Model):
|
||||
class AlertTemplate(BaseAuditModel):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
@@ -517,6 +531,7 @@ class AlertTemplate(models.Model):
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
agent_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
@@ -540,6 +555,7 @@ class AlertTemplate(models.Model):
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
check_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
@@ -563,6 +579,7 @@ class AlertTemplate(models.Model):
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
task_script_actions = BooleanField(null=True, blank=True, default=True)
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
@@ -581,6 +598,13 @@ class AlertTemplate(models.Model):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def serialize(alert_template):
|
||||
# serializes the agent and returns json
|
||||
from .serializers import AlertTemplateAuditSerializer
|
||||
|
||||
return AlertTemplateAuditSerializer(alert_template).data
|
||||
|
||||
@property
|
||||
def has_agent_settings(self) -> bool:
|
||||
return (
|
||||
|
||||
api/tacticalrmm/alerts/permissions.py (new file, 55 lines)
@@ -0,0 +1,55 @@
from django.shortcuts import get_object_or_404
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm, _has_perm_on_agent


def _has_perm_on_alert(user, id: int):
    from alerts.models import Alert

    role = user.role
    if user.is_superuser or (role and getattr(role, "is_superuser")):
        return True

    # make sure non-superusers with empty roles aren't permitted
    elif not role:
        return False

    alert = get_object_or_404(Alert, id=id)

    if alert.agent:
        agent_id = alert.agent.agent_id
    elif alert.assigned_check:
        agent_id = alert.assigned_check.agent.agent_id
    elif alert.assigned_task:
        agent_id = alert.assigned_task.agent.agent_id
    else:
        return True

    return _has_perm_on_agent(user, agent_id)


class AlertPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET" or r.method == "PATCH":
            if "pk" in view.kwargs.keys():
                return _has_perm(r, "can_list_alerts") and _has_perm_on_alert(
                    r.user, view.kwargs["pk"]
                )
            else:
                return _has_perm(r, "can_list_alerts")
        else:
            if "pk" in view.kwargs.keys():
                return _has_perm(r, "can_manage_alerts") and _has_perm_on_alert(
                    r.user, view.kwargs["pk"]
                )
            else:
                return _has_perm(r, "can_manage_alerts")


class AlertTemplatePerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET":
            return _has_perm(r, "can_list_alerttemplates")
        else:
            return _has_perm(r, "can_manage_alerttemplates")
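For orientation, a minimal sketch (not part of the diff) of how these permission classes are attached to a view; the view name below is hypothetical, but the permission_classes wiring mirrors the views.py changes later in this compare.

from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from alerts.permissions import AlertPerms


class ExampleAlertView(APIView):
    # IsAuthenticated runs first; AlertPerms then requires can_list_alerts for
    # GET/PATCH and can_manage_alerts for other methods, and scopes single-alert
    # requests (a "pk" in the URL) to agents the requesting user is allowed to see.
    permission_classes = [IsAuthenticated, AlertPerms]

    def get(self, request, pk):
        return Response("ok")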
@@ -2,7 +2,7 @@ from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||
|
||||
from automation.serializers import PolicySerializer
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
@@ -10,12 +10,29 @@ from .models import Alert, AlertTemplate
|
||||
|
||||
class AlertSerializer(ModelSerializer):
|
||||
|
||||
hostname = SerializerMethodField(read_only=True)
|
||||
client = SerializerMethodField(read_only=True)
|
||||
site = SerializerMethodField(read_only=True)
|
||||
alert_time = SerializerMethodField(read_only=True)
|
||||
resolve_on = SerializerMethodField(read_only=True)
|
||||
snoozed_until = SerializerMethodField(read_only=True)
|
||||
hostname = SerializerMethodField()
|
||||
agent_id = SerializerMethodField()
|
||||
client = SerializerMethodField()
|
||||
site = SerializerMethodField()
|
||||
alert_time = SerializerMethodField()
|
||||
resolve_on = SerializerMethodField()
|
||||
snoozed_until = SerializerMethodField()
|
||||
|
||||
def get_agent_id(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
return instance.agent.agent_id if instance.agent else ""
|
||||
elif instance.alert_type == "check":
|
||||
return (
|
||||
instance.assigned_check.agent.agent_id
|
||||
if instance.assigned_check
|
||||
else ""
|
||||
)
|
||||
elif instance.alert_type == "task":
|
||||
return (
|
||||
instance.assigned_task.agent.agent_id if instance.assigned_task else ""
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_hostname(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
@@ -113,9 +130,15 @@ class AlertTemplateSerializer(ModelSerializer):
|
||||
|
||||
class AlertTemplateRelationSerializer(ModelSerializer):
|
||||
policies = PolicySerializer(read_only=True, many=True)
|
||||
clients = ClientSerializer(read_only=True, many=True)
|
||||
sites = SiteSerializer(read_only=True, many=True)
|
||||
clients = ClientMinimumSerializer(read_only=True, many=True)
|
||||
sites = SiteMinimumSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class AlertTemplateAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,11 +1,10 @@
from django.utils import timezone as djangotime

from alerts.models import Alert
from tacticalrmm.celery import app


@app.task
def unsnooze_alerts() -> str:
    from .models import Alert

    Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
        snoozed=False, snooze_until=None
@@ -22,3 +21,14 @@ def cache_agents_alert_template():
        agent.set_alert_template()

    return "ok"


@app.task
def prune_resolved_alerts(older_than_days: int) -> str:
    from .models import Alert

    Alert.objects.filter(resolved=True).filter(
        alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
    ).delete()

    return "ok"
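A minimal sketch, not taken from this diff, of how a periodic task such as prune_resolved_alerts could be scheduled with Celery beat; the schedule key, run time, and 30-day retention below are illustrative assumptions, not values from this repository.

from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "prune-resolved-alerts": {
        "task": "alerts.tasks.prune_resolved_alerts",
        "schedule": crontab(hour=3, minute=0),  # nightly run (assumed)
        "args": (30,),  # keep resolved alerts for 30 days (assumed)
    },
}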
@@ -1,14 +1,15 @@
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
from itertools import cycle
|
||||
|
||||
from core.models import CoreSettings
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from core.tasks import cache_db_fields_task
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
from .serializers import (
|
||||
@@ -17,6 +18,8 @@ from .serializers import (
|
||||
AlertTemplateSerializer,
|
||||
)
|
||||
|
||||
base_url = "/alerts"
|
||||
|
||||
|
||||
class TestAlertsViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -24,7 +27,7 @@ class TestAlertsViews(TacticalTestCase):
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_get_alerts(self):
|
||||
url = "/alerts/alerts/"
|
||||
url = "/alerts/"
|
||||
|
||||
# create check, task, and agent to test each serializer function
|
||||
check = baker.make_recipe("checks.diskspace_check")
|
||||
@@ -117,7 +120,7 @@ class TestAlertsViews(TacticalTestCase):
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_add_alert(self):
|
||||
url = "/alerts/alerts/"
|
||||
url = "/alerts/"
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
data = {
|
||||
@@ -134,11 +137,11 @@ class TestAlertsViews(TacticalTestCase):
|
||||
|
||||
def test_get_alert(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.get("/alerts/alerts/500/", format="json")
|
||||
resp = self.client.get("/alerts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert = baker.make("alerts.Alert")
|
||||
url = f"/alerts/alerts/{alert.pk}/" # type: ignore
|
||||
url = f"/alerts/{alert.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertSerializer(alert)
|
||||
@@ -150,16 +153,15 @@ class TestAlertsViews(TacticalTestCase):
|
||||
|
||||
def test_update_alert(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerts/500/", format="json")
|
||||
resp = self.client.put("/alerts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert = baker.make("alerts.Alert", resolved=False, snoozed=False)
|
||||
|
||||
url = f"/alerts/alerts/{alert.pk}/" # type: ignore
|
||||
url = f"/alerts/{alert.pk}/" # type: ignore
|
||||
|
||||
# test resolving alert
|
||||
data = {
|
||||
"id": alert.pk, # type: ignore
|
||||
"type": "resolve",
|
||||
}
|
||||
resp = self.client.put(url, data, format="json")
|
||||
@@ -168,26 +170,26 @@ class TestAlertsViews(TacticalTestCase):
|
||||
self.assertTrue(Alert.objects.get(pk=alert.pk).resolved_on) # type: ignore
|
||||
|
||||
# test snoozing alert
|
||||
data = {"id": alert.pk, "type": "snooze", "snooze_days": "30"} # type: ignore
|
||||
data = {"type": "snooze", "snooze_days": "30"} # type: ignore
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Alert.objects.get(pk=alert.pk).snoozed) # type: ignore
|
||||
self.assertTrue(Alert.objects.get(pk=alert.pk).snooze_until) # type: ignore
|
||||
|
||||
# test snoozing alert without snooze_days
|
||||
data = {"id": alert.pk, "type": "snooze"} # type: ignore
|
||||
data = {"type": "snooze"} # type: ignore
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test unsnoozing alert
|
||||
data = {"id": alert.pk, "type": "unsnooze"} # type: ignore
|
||||
data = {"type": "unsnooze"} # type: ignore
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertFalse(Alert.objects.get(pk=alert.pk).snoozed) # type: ignore
|
||||
self.assertFalse(Alert.objects.get(pk=alert.pk).snooze_until) # type: ignore
|
||||
|
||||
# test invalid type
|
||||
data = {"id": alert.pk, "type": "invalid"} # type: ignore
|
||||
data = {"type": "invalid"} # type: ignore
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
@@ -195,13 +197,13 @@ class TestAlertsViews(TacticalTestCase):
|
||||
|
||||
def test_delete_alert(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerts/500/", format="json")
|
||||
resp = self.client.put("/alerts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert = baker.make("alerts.Alert")
|
||||
|
||||
# test delete alert
|
||||
url = f"/alerts/alerts/{alert.pk}/" # type: ignore
|
||||
url = f"/alerts/{alert.pk}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
@@ -243,7 +245,7 @@ class TestAlertsViews(TacticalTestCase):
|
||||
self.assertTrue(Alert.objects.filter(snoozed=False).exists())
|
||||
|
||||
def test_get_alert_templates(self):
|
||||
url = "/alerts/alerttemplates/"
|
||||
url = "/alerts/templates/"
|
||||
|
||||
alert_templates = baker.make("alerts.AlertTemplate", _quantity=3)
|
||||
resp = self.client.get(url, format="json")
|
||||
@@ -255,7 +257,7 @@ class TestAlertsViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_alert_template(self):
|
||||
url = "/alerts/alerttemplates/"
|
||||
url = "/alerts/templates/"
|
||||
|
||||
data = {
|
||||
"name": "Test Template",
|
||||
@@ -268,11 +270,11 @@ class TestAlertsViews(TacticalTestCase):
|
||||
|
||||
def test_get_alert_template(self):
|
||||
# returns 404 for invalid alert template pk
|
||||
resp = self.client.get("/alerts/alerttemplates/500/", format="json")
|
||||
resp = self.client.get("/alerts/templates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/" # type: ignore
|
||||
url = f"/alerts/templates/{alert_template.pk}/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template)
|
||||
@@ -284,16 +286,15 @@ class TestAlertsViews(TacticalTestCase):
|
||||
|
||||
def test_update_alert_template(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerttemplates/500/", format="json")
|
||||
resp = self.client.put("/alerts/templates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/" # type: ignore
|
||||
url = f"/alerts/templates/{alert_template.pk}/" # type: ignore
|
||||
|
||||
# test data
|
||||
data = {
|
||||
"id": alert_template.pk, # type: ignore
|
||||
"agent_email_on_resolved": True,
|
||||
"agent_text_on_resolved": True,
|
||||
"agent_include_desktops": True,
|
||||
@@ -309,13 +310,13 @@ class TestAlertsViews(TacticalTestCase):
|
||||
|
||||
def test_delete_alert_template(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerttemplates/500/", format="json")
|
||||
resp = self.client.put("/alerts/templates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
|
||||
# test delete alert
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/" # type: ignore
|
||||
url = f"/alerts/templates/{alert_template.pk}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
@@ -330,10 +331,10 @@ class TestAlertsViews(TacticalTestCase):
|
||||
baker.make("clients.Site", alert_template=alert_template, _quantity=3)
|
||||
baker.make("automation.Policy", alert_template=alert_template)
|
||||
core = CoreSettings.objects.first()
|
||||
core.alert_template = alert_template
|
||||
core.save()
|
||||
core.alert_template = alert_template # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/related/" # type: ignore
|
||||
url = f"/alerts/templates/{alert_template.pk}/related/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateRelationSerializer(alert_template)
|
||||
@@ -403,16 +404,16 @@ class TestAlertTasks(TacticalTestCase):
|
||||
# assign first Alert Template to a policy and apply it as default
|
||||
policy.alert_template = alert_templates[0] # type: ignore
|
||||
policy.save() # type: ignore
|
||||
core.workstation_policy = policy
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
core.workstation_policy = policy # type: ignore
|
||||
core.server_policy = policy # type: ignore
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk) # type: ignore
|
||||
|
||||
# assign second Alert Template as the default alert template
|
||||
core.alert_template = alert_templates[1] # type: ignore
|
||||
core.save()
|
||||
core.save() # type: ignore
|
||||
|
||||
self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
self.assertEquals(server.set_alert_template().pk, alert_templates[1].pk) # type: ignore
|
||||
@@ -514,6 +515,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
)
|
||||
|
||||
from alerts.models import Alert
|
||||
|
||||
agent_dashboard_alert = baker.make_recipe("agents.overdue_agent")
|
||||
@@ -675,25 +677,14 @@ class TestAlertTasks(TacticalTestCase):
|
||||
url = "/api/v3/checkin/"
|
||||
|
||||
agent_template_text.version = settings.LATEST_AGENT_VER
|
||||
agent_template_text.last_seen = djangotime.now()
|
||||
agent_template_text.save()
|
||||
|
||||
agent_template_email.version = settings.LATEST_AGENT_VER
|
||||
agent_template_email.last_seen = djangotime.now()
|
||||
agent_template_email.save()
|
||||
|
||||
data = {
|
||||
"agent_id": agent_template_text.agent_id,
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
data = {
|
||||
"agent_id": agent_template_email.agent_id,
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
cache_db_fields_task()
|
||||
|
||||
recovery_sms.assert_called_with(
|
||||
pk=Alert.objects.get(agent=agent_template_text).pk
|
||||
@@ -727,7 +718,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from checks.models import Check
|
||||
from checks.tasks import (
|
||||
handle_check_email_alert_task,
|
||||
@@ -736,6 +726,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_resolved_check_sms_alert_task,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1011,7 +1003,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
send_email,
|
||||
sleep,
|
||||
):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import (
|
||||
handle_resolved_task_email_alert,
|
||||
@@ -1020,6 +1011,8 @@ class TestAlertTasks(TacticalTestCase):
|
||||
handle_task_sms_alert,
|
||||
)
|
||||
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
# create test data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_no_settings = baker.make_recipe("agents.agent")
|
||||
@@ -1272,17 +1265,17 @@ class TestAlertTasks(TacticalTestCase):
|
||||
)
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
core.smtp_host = "test.test.com"
|
||||
core.smtp_port = 587
|
||||
core.smtp_recipients = ["recipient@test.com"]
|
||||
core.twilio_account_sid = "test"
|
||||
core.twilio_auth_token = "1234123412341234"
|
||||
core.sms_alert_recipients = ["+1234567890"]
|
||||
core.smtp_host = "test.test.com" # type: ignore
|
||||
core.smtp_port = 587 # type: ignore
|
||||
core.smtp_recipients = ["recipient@test.com"] # type: ignore
|
||||
core.twilio_account_sid = "test" # type: ignore
|
||||
core.twilio_auth_token = "1234123412341234" # type: ignore
|
||||
core.sms_alert_recipients = ["+1234567890"] # type: ignore
|
||||
|
||||
# test sending email with alert template settings
|
||||
core.send_mail("Test", "Test", alert_template=alert_template)
|
||||
core.send_mail("Test", "Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
core.send_sms("Test", alert_template=alert_template)
|
||||
core.send_sms("Test", alert_template=alert_template) # type: ignore
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.tasks.agent_outage_sms_task.delay")
|
||||
@@ -1315,6 +1308,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
"alerts.AlertTemplate",
|
||||
is_active=True,
|
||||
agent_always_alert=True,
|
||||
agent_script_actions=False,
|
||||
action=failure_action,
|
||||
action_timeout=30,
|
||||
resolved_action=resolved_action,
|
||||
@@ -1328,6 +1322,14 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# should not have been called since agent_script_actions is set to False
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
alert_template.agent_script_actions = True # type: ignore
|
||||
alert_template.save() # type: ignore
|
||||
|
||||
agent_outages_task()
|
||||
|
||||
# this is what data should be
|
||||
data = {
|
||||
"func": "runscriptfull",
|
||||
@@ -1340,14 +1342,6 @@ class TestAlertTasks(TacticalTestCase):
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# Setup cmd mock
|
||||
success = {
|
||||
"retcode": 0,
|
||||
"stdout": "success!",
|
||||
"stderr": "",
|
||||
"execution_time": 5.0000,
|
||||
}
|
||||
|
||||
nats_cmd.side_effect = ["pong", success]
|
||||
|
||||
# make sure script run results were stored
|
||||
@@ -1361,15 +1355,7 @@ class TestAlertTasks(TacticalTestCase):
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save()
|
||||
|
||||
url = "/api/v3/checkin/"
|
||||
|
||||
data = {
|
||||
"agent_id": agent.agent_id,
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
cache_db_fields_task()
|
||||
|
||||
# this is what data should be
|
||||
data = {
|
||||
@@ -1387,3 +1373,199 @@ class TestAlertTasks(TacticalTestCase):
|
||||
self.assertEqual(alert.resolved_action_execution_time, "5.0000")
|
||||
self.assertEqual(alert.resolved_action_stdout, "success!")
|
||||
self.assertEqual(alert.resolved_action_stderr, "")
|
||||
|
||||
def test_parse_script_args(self):
|
||||
alert = baker.make("alerts.Alert")
|
||||
|
||||
args = ["-Parameter", "-Another {{alert.id}}"]
|
||||
|
||||
# test default value
|
||||
self.assertEqual(
|
||||
["-Parameter", f"-Another '{alert.id}'"], # type: ignore
|
||||
alert.parse_script_args(args=args), # type: ignore
|
||||
)
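The expectation above follows from the parse_script_args change shown earlier in this compare: the whole {{...}} token is replaced with the attribute value wrapped in single quotes. Below is a standalone, simplified sketch of that substitution (the real method resolves attributes on the Alert instance; its exact regex is not reproduced here).

import re


def substitute_placeholders(obj, args):
    # Replace a {{name}} token with the value of obj.name, quoted, mirroring
    # Alert.parse_script_args in spirit; args without a token pass through.
    out = []
    for arg in args:
        match = re.search(r"\{\{(.*)\}\}", arg)
        if match and hasattr(obj, match.group(1)) and not callable(getattr(obj, match.group(1))):
            value = f"'{getattr(obj, match.group(1))}'"
            out.append(re.sub(r"\{\{.*\}\}", value, arg))
        else:
            out.append(arg)
    return out


class FakeAlert:
    id = 42


# -> ["-Parameter", "-Another '42'"], matching the shape asserted in the test above
print(substitute_placeholders(FakeAlert(), ["-Parameter", "-Another {{id}}"]))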
|
||||
|
||||
def test_prune_resolved_alerts(self):
|
||||
from .tasks import prune_resolved_alerts
|
||||
|
||||
# setup data
|
||||
resolved_alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=True,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
resolved=False,
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
days = 0
|
||||
for alert in resolved_alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
days = 0
|
||||
for alert in alerts: # type: ignore
|
||||
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
|
||||
alert.save()
|
||||
days = days + 5
|
||||
|
||||
# prune resolved alerts older than 30 days; 19 of the 25 resolved alerts are past the cutoff, leaving 31 total
|
||||
prune_resolved_alerts(30)
|
||||
|
||||
self.assertEqual(Alert.objects.count(), 31)
|
||||
|
||||
|
||||
class TestAlertPermissions(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.client_setup()
|
||||
|
||||
def test_get_alerts_permissions(self):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent1 = baker.make_recipe("agents.agent")
|
||||
agent2 = baker.make_recipe("agents.agent")
|
||||
agents = [agent, agent1, agent2]
|
||||
checks = baker.make("checks.Check", agent=cycle(agents), _quantity=3)
|
||||
tasks = baker.make("autotasks.AutomatedTask", agent=cycle(agents), _quantity=3)
|
||||
baker.make(
|
||||
"alerts.Alert", alert_type="task", assigned_task=cycle(tasks), _quantity=3
|
||||
)
|
||||
baker.make(
|
||||
"alerts.Alert",
|
||||
alert_type="check",
|
||||
assigned_check=cycle(checks),
|
||||
_quantity=3,
|
||||
)
|
||||
baker.make(
|
||||
"alerts.Alert", alert_type="availability", agent=cycle(agents), _quantity=3
|
||||
)
|
||||
baker.make("alerts.Alert", alert_type="custom", _quantity=4)
|
||||
|
||||
# test super user access
|
||||
r = self.check_authorized_superuser("patch", f"{base_url}/")
|
||||
self.assertEqual(len(r.data), 13) # type: ignore
|
||||
|
||||
user = self.create_user_with_roles([])
|
||||
self.client.force_authenticate(user=user) # type: ignore
|
||||
|
||||
self.check_not_authorized("patch", f"{base_url}/")
|
||||
|
||||
# add list alerts role to user
|
||||
user.role.can_list_alerts = True
|
||||
user.role.save()
|
||||
|
||||
r = self.check_authorized("patch", f"{base_url}/")
|
||||
self.assertEqual(len(r.data), 13) # type: ignore
|
||||
|
||||
# test limiting to client
|
||||
user.role.can_view_clients.set([agent.client])
|
||||
r = self.check_authorized("patch", f"{base_url}/")
|
||||
self.assertEqual(len(r.data), 7) # type: ignore
|
||||
|
||||
# test limiting to site
|
||||
user.role.can_view_clients.clear()
|
||||
user.role.can_view_sites.set([agent1.site])
|
||||
r = self.client.patch(f"{base_url}/")
|
||||
self.assertEqual(len(r.data), 7) # type: ignore
|
||||
|
||||
# test limiting to site and client
|
||||
user.role.can_view_clients.set([agent2.client])
|
||||
r = self.client.patch(f"{base_url}/")
|
||||
self.assertEqual(len(r.data), 10) # type: ignore
|
||||
|
||||
@patch("alerts.models.Alert.delete", return_value=1)
|
||||
def test_edit_delete_get_alert_permissions(self, delete):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent1 = baker.make_recipe("agents.agent")
|
||||
agent2 = baker.make_recipe("agents.agent")
|
||||
agents = [agent, agent1, agent2]
|
||||
checks = baker.make("checks.Check", agent=cycle(agents), _quantity=3)
|
||||
tasks = baker.make("autotasks.AutomatedTask", agent=cycle(agents), _quantity=3)
|
||||
alert_tasks = baker.make(
|
||||
"alerts.Alert", alert_type="task", assigned_task=cycle(tasks), _quantity=3
|
||||
)
|
||||
alert_checks = baker.make(
|
||||
"alerts.Alert",
|
||||
alert_type="check",
|
||||
assigned_check=cycle(checks),
|
||||
_quantity=3,
|
||||
)
|
||||
alert_agents = baker.make(
|
||||
"alerts.Alert", alert_type="availability", agent=cycle(agents), _quantity=3
|
||||
)
|
||||
alert_custom = baker.make("alerts.Alert", alert_type="custom", _quantity=4)
|
||||
|
||||
# alert task url
|
||||
task_url = f"{base_url}/{alert_tasks[0].id}/" # for agent
|
||||
unauthorized_task_url = f"{base_url}/{alert_tasks[1].id}/" # for agent1
|
||||
# alert check url
|
||||
check_url = f"{base_url}/{alert_checks[0].id}/" # for agent
|
||||
unauthorized_check_url = f"{base_url}/{alert_checks[1].id}/" # for agent1
|
||||
# alert agent url
|
||||
agent_url = f"{base_url}/{alert_agents[0].id}/" # for agent
|
||||
unauthorized_agent_url = f"{base_url}/{alert_agents[1].id}/" # for agent1
|
||||
# custom alert url
|
||||
custom_url = f"{base_url}/{alert_custom[0].id}/" # no agent associated
|
||||
|
||||
authorized_urls = [task_url, check_url, agent_url, custom_url]
|
||||
unauthorized_urls = [
|
||||
unauthorized_agent_url,
|
||||
unauthorized_check_url,
|
||||
unauthorized_task_url,
|
||||
]
|
||||
|
||||
for method in ["get", "put", "delete"]:
|
||||
|
||||
# test superuser access
|
||||
for url in authorized_urls:
|
||||
self.check_authorized_superuser(method, url)
|
||||
|
||||
for url in unauthorized_urls:
|
||||
self.check_authorized_superuser(method, url)
|
||||
|
||||
user = self.create_user_with_roles([])
|
||||
self.client.force_authenticate(user=user) # type: ignore
|
||||
|
||||
# test user without role
|
||||
for url in authorized_urls:
|
||||
self.check_not_authorized(method, url)
|
||||
|
||||
for url in unauthorized_urls:
|
||||
self.check_not_authorized(method, url)
|
||||
|
||||
# add user to role and test
|
||||
setattr(
|
||||
user.role,
|
||||
"can_list_alerts" if method == "get" else "can_manage_alerts",
|
||||
True,
|
||||
)
|
||||
user.role.save()
|
||||
|
||||
# test user with role
|
||||
for url in authorized_urls:
|
||||
self.check_authorized(method, url)
|
||||
|
||||
for url in unauthorized_urls:
|
||||
self.check_authorized(method, url)
|
||||
|
||||
# limit user to client if agent check
|
||||
user.role.can_view_clients.set([agent.client])
|
||||
|
||||
for url in authorized_urls:
|
||||
self.check_authorized(method, url)
|
||||
|
||||
for url in unauthorized_urls:
|
||||
self.check_not_authorized(method, url)
|
||||
|
||||
# limit user to client if agent check
|
||||
user.role.can_view_sites.set([agent1.site])
|
||||
|
||||
for url in authorized_urls:
|
||||
self.check_authorized(method, url)
|
||||
|
||||
for url in unauthorized_urls:
|
||||
self.check_authorized(method, url)
|
||||
|
||||
@@ -3,10 +3,10 @@ from django.urls import path
from . import views

urlpatterns = [
    path("alerts/", views.GetAddAlerts.as_view()),
    path("", views.GetAddAlerts.as_view()),
    path("<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
    path("bulk/", views.BulkAlerts.as_view()),
    path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
    path("alerttemplates/", views.GetAddAlertTemplates.as_view()),
    path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
    path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
    path("templates/", views.GetAddAlertTemplates.as_view()),
    path("templates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
    path("templates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
]
|
||||
@@ -3,12 +3,14 @@ from datetime import datetime as dt
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
from .permissions import AlertPerms, AlertTemplatePerms
|
||||
from .serializers import (
|
||||
AlertSerializer,
|
||||
AlertTemplateRelationSerializer,
|
||||
@@ -18,6 +20,8 @@ from .tasks import cache_agents_alert_template
|
||||
|
||||
|
||||
class GetAddAlerts(APIView):
|
||||
permission_classes = [IsAuthenticated, AlertPerms]
|
||||
|
||||
def patch(self, request):
|
||||
|
||||
# top 10 alerts for dashboard icon
|
||||
@@ -88,7 +92,8 @@ class GetAddAlerts(APIView):
|
||||
)
|
||||
|
||||
alerts = (
|
||||
Alert.objects.filter(clientFilter)
|
||||
Alert.objects.filter_by_role(request.user)
|
||||
.filter(clientFilter)
|
||||
.filter(severityFilter)
|
||||
.filter(resolvedFilter)
|
||||
.filter(snoozedFilter)
|
||||
@@ -97,7 +102,7 @@ class GetAddAlerts(APIView):
|
||||
return Response(AlertSerializer(alerts, many=True).data)
|
||||
|
||||
else:
|
||||
alerts = Alert.objects.all()
|
||||
alerts = Alert.objects.filter_by_role(request.user)
|
||||
return Response(AlertSerializer(alerts, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
@@ -109,9 +114,10 @@ class GetAddAlerts(APIView):
|
||||
|
||||
|
||||
class GetUpdateDeleteAlert(APIView):
|
||||
permission_classes = [IsAuthenticated, AlertPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
alert = get_object_or_404(Alert, pk=pk)
|
||||
|
||||
return Response(AlertSerializer(alert).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
@@ -163,6 +169,8 @@ class GetUpdateDeleteAlert(APIView):
|
||||
|
||||
|
||||
class BulkAlerts(APIView):
|
||||
permission_classes = [IsAuthenticated, AlertPerms]
|
||||
|
||||
def post(self, request):
|
||||
if request.data["bulk_action"] == "resolve":
|
||||
Alert.objects.filter(id__in=request.data["alerts"]).update(
|
||||
@@ -185,9 +193,10 @@ class BulkAlerts(APIView):
|
||||
|
||||
|
||||
class GetAddAlertTemplates(APIView):
|
||||
permission_classes = [IsAuthenticated, AlertTemplatePerms]
|
||||
|
||||
def get(self, request):
|
||||
alert_templates = AlertTemplate.objects.all()
|
||||
|
||||
return Response(AlertTemplateSerializer(alert_templates, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
@@ -202,6 +211,8 @@ class GetAddAlertTemplates(APIView):
|
||||
|
||||
|
||||
class GetUpdateDeleteAlertTemplate(APIView):
|
||||
permission_classes = [IsAuthenticated, AlertTemplatePerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
|
||||
@@ -231,6 +242,8 @@ class GetUpdateDeleteAlertTemplate(APIView):
|
||||
|
||||
|
||||
class RelatedAlertTemplate(APIView):
|
||||
permission_classes = [IsAuthenticated, AlertTemplatePerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
return Response(AlertTemplateRelationSerializer(alert_template).data)
|
||||
|
||||
@@ -5,8 +5,8 @@ from unittest.mock import patch
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
@@ -130,42 +130,6 @@ class TestAPIv3(TacticalTestCase):
|
||||
self.assertIsInstance(r.json()["check_interval"], int)
|
||||
self.assertEqual(len(r.json()["checks"]), 15)
|
||||
|
||||
def test_checkin_patch(self):
|
||||
from logs.models import PendingAction
|
||||
|
||||
url = "/api/v3/checkin/"
|
||||
agent_updated = baker.make_recipe("agents.agent", version="1.3.0")
|
||||
PendingAction.objects.create(
|
||||
agent=agent_updated,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": agent_updated.winagent_dl,
|
||||
"version": agent_updated.version,
|
||||
"inno": agent_updated.win_inno_exe,
|
||||
},
|
||||
)
|
||||
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||
self.assertEqual(action.status, "pending")
|
||||
|
||||
# test agent failed to update and still on same version
|
||||
payload = {
|
||||
"func": "hello",
|
||||
"agent_id": agent_updated.agent_id,
|
||||
"version": "1.3.0",
|
||||
}
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||
self.assertEqual(action.status, "pending")
|
||||
|
||||
# test agent successful update
|
||||
payload["version"] = settings.LATEST_AGENT_VER
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
|
||||
self.assertEqual(action.status, "completed")
|
||||
action.delete()
|
||||
|
||||
@patch("apiv3.views.reload_nats")
|
||||
def test_agent_recovery(self, reload_nats):
|
||||
reload_nats.return_value = "ok"
|
||||
@@ -213,7 +177,8 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
script = baker.make_recipe("scripts.script")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent, script=script)
|
||||
|
||||
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore
|
||||
|
||||
|
||||
@@ -20,4 +20,5 @@ urlpatterns = [
|
||||
path("superseded/", views.SupersededWinUpdate.as_view()),
|
||||
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
|
||||
path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
|
||||
path("<int:pk>/<str:agentid>/histresult/", views.AgentHistoryResult.as_view()),
|
||||
]
|
||||
|
||||
@@ -6,7 +6,6 @@ from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
@@ -15,117 +14,23 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from accounts.models import User
|
||||
from agents.models import Agent, AgentCustomField
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from agents.models import Agent, AgentHistory
|
||||
from agents.serializers import AgentHistorySerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from checks.models import Check
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from checks.utils import bytes2human
|
||||
from logs.models import PendingAction
|
||||
from logs.models import PendingAction, DebugLog
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
|
||||
from tacticalrmm.utils import notify_error, reload_nats
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class CheckIn(APIView):
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request):
|
||||
from alerts.models import Alert
|
||||
|
||||
updated = False
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if pyver.parse(request.data["version"]) > pyver.parse(
|
||||
agent.version
|
||||
) or pyver.parse(request.data["version"]) == pyver.parse(
|
||||
settings.LATEST_AGENT_VER
|
||||
):
|
||||
updated = True
|
||||
agent.version = request.data["version"]
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["version", "last_seen"])
|
||||
|
||||
# change agent update pending status to completed if agent has just updated
|
||||
if (
|
||||
updated
|
||||
and agent.pendingactions.filter( # type: ignore
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists()
|
||||
):
|
||||
agent.pendingactions.filter( # type: ignore
|
||||
action_type="agentupdate", status="pending"
|
||||
).update(status="completed")
|
||||
|
||||
# handles any alerting actions
|
||||
if Alert.objects.filter(agent=agent, resolved=False).exists():
|
||||
Alert.handle_alert_resolve(agent)
|
||||
|
||||
# sync scheduled tasks
|
||||
if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
|
||||
tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore
|
||||
|
||||
for task in tasks:
|
||||
if task.sync_status == "pendingdeletion":
|
||||
task.delete_task_on_agent()
|
||||
elif task.sync_status == "initial":
|
||||
task.modify_task_on_agent()
|
||||
elif task.sync_status == "notsynced":
|
||||
task.create_task_on_agent()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
|
||||
if request.data["func"] == "disks":
|
||||
disks = request.data["disks"]
|
||||
new = []
|
||||
for disk in disks:
|
||||
tmp = {}
|
||||
for _, _ in disk.items():
|
||||
tmp["device"] = disk["device"]
|
||||
tmp["fstype"] = disk["fstype"]
|
||||
tmp["total"] = bytes2human(disk["total"])
|
||||
tmp["used"] = bytes2human(disk["used"])
|
||||
tmp["free"] = bytes2human(disk["free"])
|
||||
tmp["percent"] = int(disk["percent"])
|
||||
new.append(tmp)
|
||||
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(disks=new)
|
||||
return Response("ok")
|
||||
|
||||
if request.data["func"] == "loggedonuser":
|
||||
if request.data["logged_in_username"] != "None":
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
||||
return Response("ok")
|
||||
|
||||
if request.data["func"] == "software":
|
||||
raw: SoftwareList = request.data["software"]
|
||||
if not isinstance(raw, list):
|
||||
return notify_error("err")
|
||||
|
||||
sw = filter_software(raw)
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.first() # type: ignore
|
||||
s.software = sw
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("ok")
|
||||
|
||||
# called once during tacticalagent windows service startup
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
@@ -167,22 +72,26 @@ class WinUpdates(APIView):
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
|
||||
needs_reboot: bool = request.data["needs_reboot"]
|
||||
agent.needs_reboot = needs_reboot
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
reboot_policy: str = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["needs_reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
elif needs_reboot and reboot_policy == "required":
|
||||
reboot = True
|
||||
|
||||
if reboot:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="windows_updates",
|
||||
message=f"{agent.hostname} is rebooting after updates were installed.",
|
||||
)
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
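A compact restatement (illustrative only; the function name is invented) of the reboot decision encoded in the WinUpdates.put change above: "always" reboots unconditionally, "required" reboots only when the agent reports that a reboot is needed, and anything else never triggers one.

def should_reboot(reboot_policy: str, needs_reboot: bool) -> bool:
    # mirrors the branch structure in the diff above
    if reboot_policy == "always":
        return True
    if needs_reboot and reboot_policy == "required":
        return True
    return False


assert should_reboot("always", False)
assert should_reboot("required", True)
assert not should_reboot("never", True)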
@@ -244,14 +153,6 @@ class WinUpdates(APIView):
|
||||
).save()
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
|
||||
# more superseded updates cleanup
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.4.2"):
|
||||
for u in agent.winupdates.filter( # type: ignore
|
||||
date_installed__isnull=True, result="failed"
|
||||
).exclude(installed=True):
|
||||
u.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -304,10 +205,11 @@ class CheckRunner(APIView):
|
||||
< djangotime.now()
|
||||
- djangotime.timedelta(seconds=check.run_interval)
|
||||
)
|
||||
# if check interval isn't set, make sure the agent's check interval has passed before running
|
||||
)
|
||||
# if check interval isn't set, make sure the agent's check interval has passed before running
|
||||
or (
|
||||
check.last_run
|
||||
not check.run_interval
|
||||
and check.last_run
|
||||
< djangotime.now() - djangotime.timedelta(seconds=agent.check_interval)
|
||||
)
|
||||
]
|
||||
@@ -320,11 +222,14 @@ class CheckRunner(APIView):
|
||||
|
||||
def patch(self, request):
|
||||
check = get_object_or_404(Check, pk=request.data["id"])
|
||||
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
status = check.handle_checkv2(request.data)
|
||||
status = check.handle_check(request.data)
|
||||
if status == "failing" and check.assignedtask.exists(): # type: ignore
|
||||
check.handle_assigned_task()
|
||||
|
||||
return Response(status)
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class CheckRunnerInterval(APIView):
|
||||
@@ -344,13 +249,12 @@ class TaskRunner(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
_ = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
return Response(TaskGOGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk, agentid):
|
||||
from alerts.models import Alert
|
||||
from logs.models import AuditLog
|
||||
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
@@ -361,33 +265,18 @@ class TaskRunner(APIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
new_task = serializer.save(last_run=djangotime.now())
|
||||
|
||||
AgentHistory.objects.create(
|
||||
agent=agent,
|
||||
type="task_run",
|
||||
script=task.script,
|
||||
script_results=request.data,
|
||||
)
|
||||
|
||||
# check if task is a collector and update the custom field
|
||||
if task.custom_field:
|
||||
if not task.stderr:
|
||||
|
||||
if AgentCustomField.objects.filter(
|
||||
field=task.custom_field, agent=task.agent
|
||||
).exists():
|
||||
agent_field = AgentCustomField.objects.get(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
else:
|
||||
agent_field = AgentCustomField.objects.create(
|
||||
field=task.custom_field, agent=task.agent
|
||||
)
|
||||
|
||||
# get last line of stdout
|
||||
value = new_task.stdout.split("\n")[-1].strip()
|
||||
|
||||
if task.custom_field.type in ["text", "number", "single", "datetime"]:
|
||||
agent_field.string_value = value
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "multiple":
|
||||
agent_field.multiple_value = value.split(",")
|
||||
agent_field.save()
|
||||
elif task.custom_field.type == "checkbox":
|
||||
agent_field.bool_value = bool(value)
|
||||
agent_field.save()
|
||||
task.save_collector_results()
|
||||
|
||||
status = "passing"
|
||||
else:
|
||||
@@ -404,15 +293,6 @@ class TaskRunner(APIView):
|
||||
else:
|
||||
Alert.handle_alert_failure(new_task)
|
||||
|
||||
AuditLog.objects.create(
|
||||
username=agent.hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="task_run",
|
||||
message=f"Scheduled Task {task.name} was run on {agent.hostname}",
|
||||
after_value=AutomatedTask.serialize(new_task),
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -503,6 +383,7 @@ class NewAgent(APIView):
|
||||
action="agent_install",
|
||||
message=f"{request.user} installed new agent {agent.hostname}",
|
||||
after_value=Agent.serialize(agent),
|
||||
debug_info={"ip": request._client_ip},
|
||||
)
|
||||
|
||||
return Response(
|
||||
@@ -520,11 +401,7 @@ class Software(APIView):
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
raw: SoftwareList = request.data["software"]
|
||||
if not isinstance(raw, list):
|
||||
return notify_error("err")
|
||||
|
||||
sw = filter_software(raw)
|
||||
sw = request.data["software"]
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
@@ -590,7 +467,18 @@ class AgentRecovery(APIView):
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent = get_object_or_404(
|
||||
Agent.objects.prefetch_related("recoveryactions").only(
|
||||
"pk", "agent_id", "last_seen"
|
||||
),
|
||||
agent_id=agentid,
|
||||
)
|
||||
|
||||
# TODO remove these 2 lines after agent v1.7.0 has been out for a while
|
||||
# this is handled now by nats-api service
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
|
||||
recovery = agent.recoveryactions.filter(last_run=None).last() # type: ignore
|
||||
ret = {"mode": "pass", "shellcmd": ""}
|
||||
if recovery is None:
|
||||
@@ -607,3 +495,16 @@ class AgentRecovery(APIView):
|
||||
reload_nats()
|
||||
|
||||
return Response(ret)
|
||||
|
||||
|
||||
class AgentHistoryResult(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request, agentid, pk):
|
||||
_ = get_object_or_404(Agent, agent_id=agentid)
|
||||
hist = get_object_or_404(AgentHistory, pk=pk)
|
||||
s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
|
||||
s.is_valid(raise_exception=True)
|
||||
s.save()
|
||||
return Response("ok")
|
||||
|
||||
@@ -0,0 +1,23 @@
# Generated by Django 3.2.6 on 2021-09-17 19:54

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("automation", "0008_auto_20210302_0415"),
    ]

    operations = [
        migrations.AlterField(
            model_name="policy",
            name="created_by",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name="policy",
            name="modified_by",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
@@ -1,6 +1,7 @@
|
||||
from django.db import models
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from django.db import models
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
|
||||
@@ -28,12 +29,11 @@ class Policy(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old policy if exists
|
||||
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
super(Policy, self).save(old_model=old_policy, *args, **kwargs)
|
||||
|
||||
# generate agent checks only if active and enforced were changed
|
||||
if old_policy:
|
||||
@@ -50,7 +50,7 @@ class Policy(BaseAuditModel):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
super(Policy, self).delete(*args, **kwargs)
|
||||
|
||||
generate_agent_checks_task.delay(agents=agents, create_tasks=True)
|
||||
|
||||
@@ -126,9 +126,9 @@ class Policy(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(policy):
|
||||
# serializes the policy and returns json
|
||||
from .serializers import PolicySerializer
|
||||
from .serializers import PolicyAuditSerializer
|
||||
|
||||
return PolicySerializer(policy).data
|
||||
return PolicyAuditSerializer(policy).data
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
@@ -430,11 +430,12 @@ class Policy(BaseAuditModel):
|
||||
|
||||
# remove policy checks from agent that fell out of policy scope
|
||||
agent.agentchecks.filter(
|
||||
managed_by_policy=True,
|
||||
parent_check__in=[
|
||||
checkpk
|
||||
for checkpk in agent_checks_parent_pks
|
||||
if checkpk not in [check.pk for check in final_list]
|
||||
]
|
||||
],
|
||||
).delete()
|
||||
|
||||
return [
|
||||
|
||||
api/tacticalrmm/automation/permissions.py (new file, 11 lines)
@@ -0,0 +1,11 @@
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm


class AutomationPolicyPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET":
            return _has_perm(r, "can_list_automation_policies")
        else:
            return _has_perm(r, "can_manage_automation_policies")
@@ -8,7 +8,7 @@ from agents.serializers import AgentHostnameSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from clients.models import Client
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Policy
|
||||
@@ -21,25 +21,70 @@ class PolicySerializer(ModelSerializer):
|
||||
|
||||
|
||||
class PolicyTableSerializer(ModelSerializer):
|
||||
|
||||
default_server_policy = ReadOnlyField(source="is_default_server_policy")
|
||||
default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
|
||||
agents_count = SerializerMethodField(read_only=True)
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
alert_template = ReadOnlyField(source="alert_template.id")
|
||||
excluded_clients = ClientSerializer(many=True)
|
||||
excluded_sites = SiteSerializer(many=True)
|
||||
excluded_agents = AgentHostnameSerializer(many=True)
|
||||
|
||||
class Meta:
|
||||
model = Policy
|
||||
fields = "__all__"
|
||||
depth = 1
|
||||
|
||||
def get_agents_count(self, policy):
|
||||
return policy.related_agents().count()
|
||||
|
||||
|
||||
class PolicyRelatedSerializer(ModelSerializer):
|
||||
workstation_clients = SerializerMethodField()
|
||||
server_clients = SerializerMethodField()
|
||||
workstation_sites = SerializerMethodField()
|
||||
server_sites = SerializerMethodField()
|
||||
agents = SerializerMethodField()
|
||||
|
||||
def get_agents(self, policy):
|
||||
return AgentHostnameSerializer(
|
||||
policy.agents.filter_by_role(self.context["user"]).only(
|
||||
"agent_id", "hostname"
|
||||
),
|
||||
many=True,
|
||||
).data
|
||||
|
||||
def get_workstation_clients(self, policy):
|
||||
return ClientMinimumSerializer(
|
||||
policy.workstation_clients.filter_by_role(self.context["user"]), many=True
|
||||
).data
|
||||
|
||||
def get_server_clients(self, policy):
|
||||
return ClientMinimumSerializer(
|
||||
policy.server_clients.filter_by_role(self.context["user"]), many=True
|
||||
).data
|
||||
|
||||
def get_workstation_sites(self, policy):
|
||||
return SiteMinimumSerializer(
|
||||
policy.workstation_sites.filter_by_role(self.context["user"]), many=True
|
||||
).data
|
||||
|
||||
def get_server_sites(self, policy):
|
||||
return SiteMinimumSerializer(
|
||||
policy.server_sites.filter_by_role(self.context["user"]), many=True
|
||||
).data
|
||||
|
||||
class Meta:
|
||||
model = Policy
|
||||
fields = (
|
||||
"pk",
|
||||
"name",
|
||||
"workstation_clients",
|
||||
"workstation_sites",
|
||||
"server_clients",
|
||||
"server_sites",
|
||||
"agents",
|
||||
"is_default_server_policy",
|
||||
"is_default_workstation_policy",
|
||||
)
|
||||
|
||||
|
||||
class PolicyOverviewSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
@@ -48,7 +93,6 @@ class PolicyOverviewSerializer(ModelSerializer):
|
||||
|
||||
|
||||
class PolicyCheckStatusSerializer(ModelSerializer):
|
||||
|
||||
hostname = ReadOnlyField(source="agent.hostname")
|
||||
|
||||
class Meta:
|
||||
@@ -57,7 +101,6 @@ class PolicyCheckStatusSerializer(ModelSerializer):
|
||||
|
||||
|
||||
class PolicyTaskStatusSerializer(ModelSerializer):
|
||||
|
||||
hostname = ReadOnlyField(source="agent.hostname")
|
||||
|
||||
class Meta:
|
||||
@@ -65,26 +108,7 @@ class PolicyTaskStatusSerializer(ModelSerializer):
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class PolicyCheckSerializer(ModelSerializer):
|
||||
class PolicyAuditSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Check
|
||||
fields = (
|
||||
"id",
|
||||
"check_type",
|
||||
"readable_desc",
|
||||
"assignedtask",
|
||||
"text_alert",
|
||||
"email_alert",
|
||||
"dashboard_alert",
|
||||
)
|
||||
depth = 1
|
||||
|
||||
|
||||
class AutoTasksFieldSerializer(ModelSerializer):
|
||||
assigned_check = PolicyCheckSerializer(read_only=True)
|
||||
script = ReadOnlyField(source="script.id")
|
||||
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
model = Policy
|
||||
fields = "__all__"
|
||||
depth = 1
|
||||
|
||||
@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Union
from tacticalrmm.celery import app


@app.task
@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
def generate_agent_checks_task(
    policy: int = None,
    site: int = None,
@@ -13,7 +13,6 @@ def generate_agent_checks_task(
    create_tasks: bool = False,
) -> Union[str, None]:
    from agents.models import Agent

    from automation.models import Policy

    p = Policy.objects.get(pk=policy) if policy else None
@@ -55,10 +54,14 @@ def generate_agent_checks_task(
        if create_tasks:
            agent.generate_tasks_from_policies()

        agent.set_alert_template()

    return "ok"


@app.task
@app.task(
    acks_late=True, retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5}
)
# updates policy managed check fields on agents
def update_policy_check_fields_task(check: int) -> str:
    from checks.models import Check
@@ -74,11 +77,10 @@ def update_policy_check_fields_task(check: int) -> str:
    return "ok"


@app.task
@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
# generates policy tasks on agents affected by a policy
def generate_agent_autotasks_task(policy: int = None) -> str:
    from agents.models import Agent

    from automation.models import Policy

    p: Policy = Policy.objects.get(pk=policy)
@@ -102,7 +104,12 @@ def generate_agent_autotasks_task(policy: int = None) -> str:
    return "ok"


@app.task
@app.task(
    acks_late=True,
    retry_backoff=5,
    retry_jitter=True,
    retry_kwargs={"max_retries": 5},
)
def delete_policy_autotasks_task(task: int) -> str:
    from autotasks.models import AutomatedTask

@@ -122,7 +129,12 @@ def run_win_policy_autotasks_task(task: int) -> str:
    return "ok"


@app.task
@app.task(
    acks_late=True,
    retry_backoff=5,
    retry_jitter=True,
    retry_kwargs={"max_retries": 5},
)
def update_policy_autotasks_fields_task(task: int, update_agent: bool = False) -> str:
    from autotasks.models import AutomatedTask

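The decorators above add Celery retry options to the automation tasks. A minimal sketch of how those options behave on a toy task is shown below; the autoretry_for argument and the example task itself are illustrative additions and are not part of the diff.

from agents.models import Agent
from tacticalrmm.celery import app

@app.task(
    autoretry_for=(Exception,),  # assumption: these retry options are paired with autoretry
    retry_backoff=5,
    retry_jitter=True,
    retry_kwargs={"max_retries": 5},
)
def example_flaky_task(agent_pk: int) -> str:
    # any exception raised here triggers a retry after roughly 5s, 10s, 20s, ...
    # (randomized when retry_jitter=True), up to 5 retries before the task fails
    agent = Agent.objects.get(pk=agent_pk)
    return f"synced {agent.hostname}"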
@@ -1,20 +1,16 @@
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker, seq
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from model_bakery import baker, seq
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
|
||||
from .serializers import (
|
||||
AutoTasksFieldSerializer,
|
||||
PolicyCheckSerializer,
|
||||
PolicyCheckStatusSerializer,
|
||||
PolicyOverviewSerializer,
|
||||
PolicySerializer,
|
||||
PolicyTableSerializer,
|
||||
PolicyTaskStatusSerializer,
|
||||
)
|
||||
|
||||
@@ -27,12 +23,10 @@ class TestPolicyViews(TacticalTestCase):
|
||||
def test_get_all_policies(self):
|
||||
url = "/automation/policies/"
|
||||
|
||||
policies = baker.make("automation.Policy", _quantity=3)
|
||||
baker.make("automation.Policy", _quantity=3)
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = PolicyTableSerializer(policies, many=True)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
self.assertEqual(len(resp.data), 3)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -54,6 +48,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_add_policy(self, create_task):
|
||||
from automation.models import Policy
|
||||
|
||||
url = "/automation/policies/"
|
||||
|
||||
data = {
|
||||
@@ -72,8 +68,12 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# create policy with tasks and checks
|
||||
policy = baker.make("automation.Policy")
|
||||
self.create_checks(policy=policy)
|
||||
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
checks = self.create_checks(policy=policy)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
|
||||
# assign a task to a check
|
||||
tasks[0].assigned_check = checks[0] # type: ignore
|
||||
tasks[0].save() # type: ignore
|
||||
|
||||
# test copy tasks and checks to another policy
|
||||
data = {
|
||||
@@ -86,8 +86,16 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
resp = self.client.post(f"/automation/policies/", data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(policy.autotasks.count(), 3) # type: ignore
|
||||
self.assertEqual(policy.policychecks.count(), 7) # type: ignore
|
||||
|
||||
copied_policy = Policy.objects.get(name=data["name"])
|
||||
|
||||
self.assertEqual(copied_policy.autotasks.count(), 3) # type: ignore
|
||||
self.assertEqual(copied_policy.policychecks.count(), 7) # type: ignore
|
||||
|
||||
# make sure the correct task was assigned to the check
|
||||
self.assertEqual(copied_policy.autotasks.get(name=tasks[0].name).assigned_check.check_type, checks[0].check_type) # type: ignore
|
||||
|
||||
create_task.assert_not_called()
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@@ -110,7 +118,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# only called if active or enforced are updated
|
||||
# only called if active, enforced, or excluded objects are updated
|
||||
generate_agent_checks_task.assert_not_called()
|
||||
|
||||
data = {
|
||||
@@ -120,6 +128,23 @@ class TestPolicyViews(TacticalTestCase):
|
||||
"enforced": False,
|
||||
}
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
generate_agent_checks_task.assert_called_with(
|
||||
policy=policy.pk, create_tasks=True # type: ignore
|
||||
)
|
||||
generate_agent_checks_task.reset_mock()
|
||||
|
||||
# make sure policies are re-evaluated when excluded changes
|
||||
agents = baker.make_recipe("agents.agent", _quantity=2)
|
||||
clients = baker.make("clients.Client", _quantity=2)
|
||||
sites = baker.make("clients.Site", _quantity=2)
|
||||
data = {
|
||||
"excluded_agents": [agent.pk for agent in agents], # type: ignore
|
||||
"excluded_sites": [site.pk for site in sites], # type: ignore
|
||||
"excluded_clients": [client.pk for client in clients], # type: ignore
|
||||
}
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
generate_agent_checks_task.assert_called_with(
|
||||
@@ -151,38 +176,6 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_all_policy_tasks(self):
|
||||
# create policy with tasks
|
||||
policy = baker.make("automation.Policy")
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
url = f"/automation/{policy.pk}/policyautomatedtasks/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AutoTasksFieldSerializer(tasks, many=True)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
self.assertEqual(len(resp.data), 3) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_all_policy_checks(self):
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy")
|
||||
checks = self.create_checks(policy=policy)
|
||||
|
||||
url = f"/automation/{policy.pk}/policychecks/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = PolicyCheckSerializer(checks, many=True)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
self.assertEqual(len(resp.data), 7) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_policy_check_status(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
@@ -195,14 +188,14 @@ class TestPolicyViews(TacticalTestCase):
|
||||
managed_by_policy=True,
|
||||
parent_check=policy_diskcheck.pk,
|
||||
)
|
||||
url = f"/automation/policycheckstatus/{policy_diskcheck.pk}/check/"
|
||||
url = f"/automation/checks/{policy_diskcheck.pk}/status/"
|
||||
|
||||
resp = self.client.patch(url, format="json")
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = PolicyCheckStatusSerializer([managed_check], many=True)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
self.check_not_authenticated("patch", url)
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_policy_overview(self):
|
||||
from clients.models import Client
|
||||
@@ -262,15 +255,15 @@ class TestPolicyViews(TacticalTestCase):
|
||||
"autotasks.AutomatedTask", parent_task=task.id, _quantity=5 # type: ignore
|
||||
)
|
||||
|
||||
url = f"/automation/policyautomatedtaskstatus/{task.id}/task/" # type: ignore
|
||||
url = f"/automation/tasks/{task.id}/status/" # type: ignore
|
||||
|
||||
serializer = PolicyTaskStatusSerializer(policy_tasks, many=True)
|
||||
resp = self.client.patch(url, format="json")
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
self.assertEqual(len(resp.data), 5) # type: ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("automation.tasks.run_win_policy_autotasks_task.delay")
|
||||
def test_run_win_task(self, mock_task):
|
||||
@@ -283,16 +276,16 @@ class TestPolicyViews(TacticalTestCase):
|
||||
_quantity=6,
|
||||
)
|
||||
|
||||
url = "/automation/runwintask/1/"
|
||||
resp = self.client.put(url, format="json")
|
||||
url = "/automation/tasks/1/run/"
|
||||
resp = self.client.post(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
mock_task.assert_called() # type: ignore
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_create_new_patch_policy(self):
|
||||
url = "/automation/winupdatepolicy/"
|
||||
url = "/automation/patchpolicy/"
|
||||
|
||||
# test policy doesn't exist
|
||||
data = {"policy": 500}
|
||||
@@ -323,15 +316,14 @@ class TestPolicyViews(TacticalTestCase):
|
||||
def test_update_patch_policy(self):
|
||||
|
||||
# test policy doesn't exist
|
||||
resp = self.client.put("/automation/winupdatepolicy/500/", format="json")
|
||||
resp = self.client.put("/automation/patchpolicy/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
policy = baker.make("automation.Policy")
|
||||
patch_policy = baker.make("winupdate.WinUpdatePolicy", policy=policy)
|
||||
url = f"/automation/winupdatepolicy/{patch_policy.pk}/" # type: ignore
|
||||
url = f"/automation/patchpolicy/{patch_policy.pk}/" # type: ignore
|
||||
|
||||
data = {
|
||||
"id": patch_policy.pk, # type: ignore
|
||||
"policy": policy.pk, # type: ignore
|
||||
"critical": "approve",
|
||||
"important": "approve",
|
||||
@@ -347,7 +339,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_reset_patch_policy(self):
|
||||
url = "/automation/winupdatepolicy/reset/"
|
||||
url = "/automation/patchpolicy/reset/"
|
||||
|
||||
inherit_fields = {
|
||||
"critical": "inherit",
|
||||
@@ -376,7 +368,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
# test reset agents in site
|
||||
data = {"site": sites[0].id} # type: ignore
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
agents = Agent.objects.filter(site=sites[0]) # type: ignore
|
||||
@@ -388,7 +380,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
# test reset agents in client
|
||||
data = {"client": clients[1].id} # type: ignore
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
agents = Agent.objects.filter(site__client=clients[1]) # type: ignore
|
||||
@@ -400,7 +392,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
# test reset all agents
|
||||
data = {}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
agents = Agent.objects.all()
|
||||
@@ -408,17 +400,17 @@ class TestPolicyViews(TacticalTestCase):
|
||||
for k, v in inherit_fields.items():
|
||||
self.assertEqual(getattr(agent.winupdatepolicy.get(), k), v)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_delete_patch_policy(self):
|
||||
# test patch policy doesn't exist
|
||||
resp = self.client.delete("/automation/winupdatepolicy/500/", format="json")
|
||||
resp = self.client.delete("/automation/patchpolicy/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
winupdate_policy = baker.make_recipe(
|
||||
"winupdate.winupdate_policy", policy__name="Test Policy"
|
||||
)
|
||||
url = f"/automation/winupdatepolicy/{winupdate_policy.pk}/"
|
||||
url = f"/automation/patchpolicy/{winupdate_policy.pk}/"
|
||||
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -473,7 +465,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
# Add Client to Policy
|
||||
policy.server_clients.add(server_agents[13].client) # type: ignore
|
||||
policy.workstation_clients.add(workstation_agents[15].client) # type: ignore
|
||||
policy.workstation_clients.add(workstation_agents[13].client) # type: ignore
|
||||
|
||||
resp = self.client.get(
|
||||
f"/automation/policies/{policy.pk}/related/", format="json" # type: ignore
|
||||
@@ -481,22 +473,28 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEquals(len(resp.data["server_clients"]), 1) # type: ignore
|
||||
self.assertEquals(len(resp.data["server_sites"]), 5) # type: ignore
|
||||
self.assertEquals(len(resp.data["server_sites"]), 0) # type: ignore
|
||||
self.assertEquals(len(resp.data["workstation_clients"]), 1) # type: ignore
|
||||
self.assertEquals(len(resp.data["workstation_sites"]), 5) # type: ignore
|
||||
self.assertEquals(len(resp.data["agents"]), 10) # type: ignore
|
||||
self.assertEquals(len(resp.data["workstation_sites"]), 0) # type: ignore
|
||||
self.assertEquals(len(resp.data["agents"]), 0) # type: ignore
|
||||
|
||||
# Add Site to Policy and the agents and sites length shouldn't change
|
||||
policy.server_sites.add(server_agents[13].site) # type: ignore
|
||||
policy.workstation_sites.add(workstation_agents[15].site) # type: ignore
|
||||
self.assertEquals(len(resp.data["server_sites"]), 5) # type: ignore
|
||||
self.assertEquals(len(resp.data["workstation_sites"]), 5) # type: ignore
|
||||
self.assertEquals(len(resp.data["agents"]), 10) # type: ignore
|
||||
# Add Site to Policy
|
||||
policy.server_sites.add(server_agents[10].site) # type: ignore
|
||||
policy.workstation_sites.add(workstation_agents[10].site) # type: ignore
|
||||
resp = self.client.get(
|
||||
f"/automation/policies/{policy.pk}/related/", format="json" # type: ignore
|
||||
)
|
||||
self.assertEquals(len(resp.data["server_sites"]), 1) # type: ignore
|
||||
self.assertEquals(len(resp.data["workstation_sites"]), 1) # type: ignore
|
||||
self.assertEquals(len(resp.data["agents"]), 0) # type: ignore
|
||||
|
||||
# Add Agent to Policy and the agents length shouldn't change
|
||||
policy.agents.add(server_agents[13]) # type: ignore
|
||||
policy.agents.add(workstation_agents[15]) # type: ignore
|
||||
self.assertEquals(len(resp.data["agents"]), 10) # type: ignore
|
||||
# Add Agent to Policy
|
||||
policy.agents.add(server_agents[2]) # type: ignore
|
||||
policy.agents.add(workstation_agents[2]) # type: ignore
|
||||
resp = self.client.get(
|
||||
f"/automation/policies/{policy.pk}/related/", format="json" # type: ignore
|
||||
)
|
||||
self.assertEquals(len(resp.data["agents"]), 2) # type: ignore
|
||||
|
||||
def test_generating_agent_policy_checks(self):
|
||||
from .tasks import generate_agent_checks_task
|
||||
@@ -771,6 +769,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_generating_policy_checks_for_all_agents(self, generate_agent_checks_mock):
|
||||
from core.models import CoreSettings
|
||||
|
||||
from .tasks import generate_agent_checks_task
|
||||
|
||||
# setup data
|
||||
@@ -887,11 +886,13 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.delete_task_on_agent")
|
||||
def test_delete_policy_tasks(self, delete_task_on_agent, create_task):
|
||||
from .tasks import delete_policy_autotasks_task
|
||||
from .tasks import delete_policy_autotasks_task, generate_agent_checks_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
baker.make_recipe("agents.server_agent", policy=policy)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
delete_policy_autotasks_task(task=tasks[0].id) # type: ignore
|
||||
|
||||
@@ -900,11 +901,13 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.run_win_task")
|
||||
def test_run_policy_task(self, run_win_task, create_task):
|
||||
from .tasks import run_win_policy_autotasks_task
|
||||
from .tasks import run_win_policy_autotasks_task, generate_agent_checks_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
baker.make_recipe("agents.server_agent", policy=policy)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
run_win_policy_autotasks_task(task=tasks[0].id) # type: ignore
|
||||
|
||||
@@ -913,7 +916,10 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
@patch("autotasks.models.AutomatedTask.modify_task_on_agent")
|
||||
def test_update_policy_tasks(self, modify_task_on_agent, create_task):
|
||||
from .tasks import update_policy_autotasks_fields_task
|
||||
from .tasks import (
|
||||
update_policy_autotasks_fields_task,
|
||||
generate_agent_checks_task,
|
||||
)
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
@@ -925,6 +931,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
tasks[0].enabled = False # type: ignore
|
||||
tasks[0].save() # type: ignore
|
||||
|
||||
@@ -964,6 +972,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_policy_exclusions(self, create_task):
|
||||
from .tasks import generate_agent_checks_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make_recipe("checks.memory_check", policy=policy)
|
||||
@@ -972,6 +982,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
"agents.agent", policy=policy, monitoring_type="server"
|
||||
)
|
||||
|
||||
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
|
||||
|
||||
# make sure related agents on policy returns correctly
|
||||
self.assertEqual(policy.related_agents().count(), 1) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 1) # type: ignore
|
||||
@@ -1133,3 +1145,9 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
# should get policies from agent policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
|
||||
class TestAutomationPermission(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.client_setup()
|
||||
self.setup_coresettings()
|
||||
|
||||
@@ -1,6 +1,8 @@
from django.urls import path

from . import views
from checks.views import GetAddChecks
from autotasks.views import GetAddAutoTasks

urlpatterns = [
    path("policies/", views.GetAddPolicies.as_view()),
@@ -8,12 +10,14 @@ urlpatterns = [
    path("policies/overview/", views.OverviewPolicy.as_view()),
    path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
    path("sync/", views.PolicySync.as_view()),
    path("<int:pk>/policychecks/", views.PolicyCheck.as_view()),
    path("<int:pk>/policyautomatedtasks/", views.PolicyAutoTask.as_view()),
    path("policycheckstatus/<int:check>/check/", views.PolicyCheck.as_view()),
    path("policyautomatedtaskstatus/<int:task>/task/", views.PolicyAutoTask.as_view()),
    path("runwintask/<int:task>/", views.PolicyAutoTask.as_view()),
    path("winupdatepolicy/", views.UpdatePatchPolicy.as_view()),
    path("winupdatepolicy/<int:patchpolicy>/", views.UpdatePatchPolicy.as_view()),
    path("winupdatepolicy/reset/", views.UpdatePatchPolicy.as_view()),
    # alias to get policy checks
    path("policies/<int:policy>/checks/", GetAddChecks.as_view()),
    # alias to get policy tasks
    path("policies/<int:policy>/tasks/", GetAddAutoTasks.as_view()),
    path("checks/<int:check>/status/", views.PolicyCheck.as_view()),
    path("tasks/<int:task>/status/", views.PolicyAutoTask.as_view()),
    path("tasks/<int:task>/run/", views.PolicyAutoTask.as_view()),
    path("patchpolicy/", views.UpdatePatchPolicy.as_view()),
    path("patchpolicy/<int:pk>/", views.UpdatePatchPolicy.as_view()),
    path("patchpolicy/reset/", views.ResetPatchPolicy.as_view()),
]

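For reference, a request-level sketch of the route and verb changes above; the IDs, the APIClient setup, and the omission of authentication are illustrative only.

from rest_framework.test import APIClient

client = APIClient()  # in practice the caller must be authenticated
client.get("/automation/checks/12/status/")    # was PATCH /automation/policycheckstatus/12/check/
client.get("/automation/tasks/34/status/")     # was PATCH /automation/policyautomatedtaskstatus/34/task/
client.post("/automation/tasks/34/run/")       # was PUT  /automation/runwintask/34/
client.post("/automation/patchpolicy/reset/", {"site": 2})  # was PATCH /automation/winupdatepolicy/reset/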
@@ -1,21 +1,22 @@
|
||||
from agents.models import Agent
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from clients.models import Client
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from tacticalrmm.utils import notify_error
|
||||
from tacticalrmm.permissions import _has_perm_on_client, _has_perm_on_site
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Policy
|
||||
from .permissions import AutomationPolicyPerms
|
||||
from .serializers import (
|
||||
AutoTasksFieldSerializer,
|
||||
PolicyCheckSerializer,
|
||||
PolicyCheckStatusSerializer,
|
||||
PolicyRelatedSerializer,
|
||||
PolicyOverviewSerializer,
|
||||
PolicySerializer,
|
||||
PolicyTableSerializer,
|
||||
@@ -24,10 +25,16 @@ from .serializers import (
|
||||
|
||||
|
||||
class GetAddPolicies(APIView):
|
||||
permission_classes = [IsAuthenticated, AutomationPolicyPerms]
|
||||
|
||||
def get(self, request):
|
||||
policies = Policy.objects.all()
|
||||
|
||||
return Response(PolicyTableSerializer(policies, many=True).data)
|
||||
return Response(
|
||||
PolicyTableSerializer(
|
||||
policies, context={"user": request.user}, many=True
|
||||
).data
|
||||
)
|
||||
|
||||
def post(self, request):
|
||||
serializer = PolicySerializer(data=request.data, partial=True)
|
||||
@@ -51,18 +58,30 @@ class GetAddPolicies(APIView):
|
||||
|
||||
|
||||
class GetUpdateDeletePolicy(APIView):
|
||||
permission_classes = [IsAuthenticated, AutomationPolicyPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
policy = get_object_or_404(Policy, pk=pk)
|
||||
|
||||
return Response(PolicySerializer(policy).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
from .tasks import generate_agent_checks_task
|
||||
|
||||
policy = get_object_or_404(Policy, pk=pk)
|
||||
|
||||
serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
# check for excluded objects in the request and, if present, regenerate agent policies
|
||||
if (
|
||||
"excluded_sites" in request.data.keys()
|
||||
or "excluded_clients" in request.data.keys()
|
||||
or "excluded_agents" in request.data.keys()
|
||||
):
|
||||
generate_agent_checks_task.delay(policy=pk, create_tasks=True)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
@@ -87,18 +106,13 @@ class PolicySync(APIView):
|
||||
|
||||
class PolicyAutoTask(APIView):
|
||||
|
||||
# tasks associated with policy
|
||||
def get(self, request, pk):
|
||||
tasks = AutomatedTask.objects.filter(policy=pk)
|
||||
return Response(AutoTasksFieldSerializer(tasks, many=True).data)
|
||||
|
||||
# get status of all tasks
|
||||
def patch(self, request, task):
|
||||
def get(self, request, task):
|
||||
tasks = AutomatedTask.objects.filter(parent_task=task)
|
||||
return Response(PolicyTaskStatusSerializer(tasks, many=True).data)
|
||||
|
||||
# bulk run win tasks associated with policy
|
||||
def put(self, request, task):
|
||||
def post(self, request, task):
|
||||
from .tasks import run_win_policy_autotasks_task
|
||||
|
||||
run_win_policy_autotasks_task.delay(task=task)
|
||||
@@ -106,11 +120,9 @@ class PolicyAutoTask(APIView):
|
||||
|
||||
|
||||
class PolicyCheck(APIView):
|
||||
def get(self, request, pk):
|
||||
checks = Check.objects.filter(policy__pk=pk, agent=None)
|
||||
return Response(PolicyCheckSerializer(checks, many=True).data)
|
||||
permission_classes = [IsAuthenticated, AutomationPolicyPerms]
|
||||
|
||||
def patch(self, request, check):
|
||||
def get(self, request, check):
|
||||
checks = Check.objects.filter(parent_check=check)
|
||||
return Response(PolicyCheckStatusSerializer(checks, many=True).data)
|
||||
|
||||
@@ -125,8 +137,6 @@ class OverviewPolicy(APIView):
|
||||
class GetRelated(APIView):
|
||||
def get(self, request, pk):
|
||||
|
||||
response = {}
|
||||
|
||||
policy = (
|
||||
Policy.objects.filter(pk=pk)
|
||||
.prefetch_related(
|
||||
@@ -138,47 +148,13 @@ class GetRelated(APIView):
|
||||
.first()
|
||||
)
|
||||
|
||||
response["default_server_policy"] = policy.is_default_server_policy
|
||||
response["default_workstation_policy"] = policy.is_default_workstation_policy
|
||||
|
||||
response["server_clients"] = ClientSerializer(
|
||||
policy.server_clients.all(), many=True
|
||||
).data
|
||||
response["workstation_clients"] = ClientSerializer(
|
||||
policy.workstation_clients.all(), many=True
|
||||
).data
|
||||
|
||||
filtered_server_sites = list()
|
||||
filtered_workstation_sites = list()
|
||||
|
||||
for client in policy.server_clients.all():
|
||||
for site in client.sites.all():
|
||||
if site not in policy.server_sites.all():
|
||||
filtered_server_sites.append(site)
|
||||
|
||||
response["server_sites"] = SiteSerializer(
|
||||
filtered_server_sites + list(policy.server_sites.all()), many=True
|
||||
).data
|
||||
|
||||
for client in policy.workstation_clients.all():
|
||||
for site in client.sites.all():
|
||||
if site not in policy.workstation_sites.all():
|
||||
filtered_workstation_sites.append(site)
|
||||
|
||||
response["workstation_sites"] = SiteSerializer(
|
||||
filtered_workstation_sites + list(policy.workstation_sites.all()), many=True
|
||||
).data
|
||||
|
||||
response["agents"] = AgentHostnameSerializer(
|
||||
policy.related_agents().only("pk", "hostname"),
|
||||
many=True,
|
||||
).data
|
||||
|
||||
return Response(response)
|
||||
return Response(
|
||||
PolicyRelatedSerializer(policy, context={"user": request.user}).data
|
||||
)
|
||||
|
||||
|
||||
class UpdatePatchPolicy(APIView):
|
||||
|
||||
permission_classes = [IsAuthenticated, AutomationPolicyPerms]
|
||||
# create new patch policy
|
||||
def post(self, request):
|
||||
policy = get_object_or_404(Policy, pk=request.data["policy"])
|
||||
@@ -191,8 +167,8 @@ class UpdatePatchPolicy(APIView):
|
||||
return Response("ok")
|
||||
|
||||
# update patch policy
|
||||
def put(self, request, patchpolicy):
|
||||
policy = get_object_or_404(WinUpdatePolicy, pk=patchpolicy)
|
||||
def put(self, request, pk):
|
||||
policy = get_object_or_404(WinUpdatePolicy, pk=pk)
|
||||
|
||||
serializer = WinUpdatePolicySerializer(
|
||||
instance=policy, data=request.data, partial=True
|
||||
@@ -202,20 +178,41 @@ class UpdatePatchPolicy(APIView):
|
||||
|
||||
return Response("ok")
|
||||
|
||||
# bulk reset agent patch policy
|
||||
def patch(self, request):
|
||||
# delete patch policy
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(WinUpdatePolicy, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class ResetPatchPolicy(APIView):
|
||||
# bulk reset agent patch policy
|
||||
def post(self, request):
|
||||
|
||||
agents = None
|
||||
if "client" in request.data:
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site__client_id=request.data["client"]
|
||||
if not _has_perm_on_client(request.user, request.data["client"]):
|
||||
raise PermissionDenied()
|
||||
|
||||
agents = (
|
||||
Agent.objects.filter_by_role(request.user)
|
||||
.prefetch_related("winupdatepolicy")
|
||||
.filter(site__client_id=request.data["client"])
|
||||
)
|
||||
elif "site" in request.data:
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site_id=request.data["site"]
|
||||
if not _has_perm_on_site(request.user, request.data["site"]):
|
||||
raise PermissionDenied()
|
||||
|
||||
agents = (
|
||||
Agent.objects.filter_by_role(request.user)
|
||||
.prefetch_related("winupdatepolicy")
|
||||
.filter(site_id=request.data["site"])
|
||||
)
|
||||
else:
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")
|
||||
agents = (
|
||||
Agent.objects.filter_by_role(request.user)
|
||||
.prefetch_related("winupdatepolicy")
|
||||
.only("pk")
|
||||
)
|
||||
|
||||
for agent in agents:
|
||||
winupdatepolicy = agent.winupdatepolicy.get()
|
||||
@@ -240,10 +237,4 @@ class UpdatePatchPolicy(APIView):
|
||||
]
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
# delete patch policy
|
||||
def delete(self, request, patchpolicy):
|
||||
get_object_or_404(WinUpdatePolicy, pk=patchpolicy).delete()
|
||||
|
||||
return Response("ok")
|
||||
return Response("The patch policy on the affected agents has been reset.")
|
||||
|
||||
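The views above repeatedly call Agent.objects.filter_by_role(request.user), and the model changes later in this diff attach a PermissionQuerySet manager. The sketch below is not the project's actual implementation; it only illustrates the kind of role-scoping queryset such a call implies, and the field names (role, can_view_sites) are assumptions.

from django.db import models

class ExamplePermissionQuerySet(models.QuerySet):
    # attach with: objects = ExamplePermissionQuerySet.as_manager()
    def filter_by_role(self, user):
        # superusers see everything; otherwise scope rows to sites the user's role can view
        if user.is_superuser:
            return self
        role = getattr(user, "role", None)
        if role is None:
            return self.none()
        return self.filter(site__in=role.can_view_sites.all())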
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-04 00:32
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-27 14:11
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.1 on 2021-05-29 03:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0021_alter_automatedtask_custom_field'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='collector_all_output',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-17 19:54
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("autotasks", "0022_automatedtask_collector_all_output"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="automatedtask",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="automatedtask",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,87 @@
|
||||
# Generated by Django 3.2.9 on 2021-12-14 00:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0023_auto_20210917_1954'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='run_time_days',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='actions',
|
||||
field=models.JSONField(default=dict),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='daily_interval',
|
||||
field=models.PositiveSmallIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='expire_date',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='monthly_days_of_month',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='monthly_months_of_year',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='monthly_weeks_of_month',
|
||||
field=models.PositiveSmallIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='random_task_delay',
|
||||
field=models.CharField(blank=True, max_length=10, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='stop_task_at_duration_end',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='task_instance_policy',
|
||||
field=models.PositiveSmallIntegerField(blank=True, default=1),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='task_repetition_duration',
|
||||
field=models.CharField(blank=True, max_length=10, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='task_repetition_interval',
|
||||
field=models.CharField(blank=True, max_length=10, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='weekly_interval',
|
||||
field=models.PositiveSmallIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='task_type',
|
||||
field=models.CharField(choices=[('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('monthlydow', 'Monthly Day of Week'), ('checkfailure', 'On Check Failure'), ('manual', 'Manual'), ('runonce', 'Run Once')], default='manual', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='timeout',
|
||||
field=models.PositiveIntegerField(blank=True, default=120),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.10 on 2021-12-29 14:57
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0024_auto_20211214_0040'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='continue_on_error',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.10 on 2021-12-30 14:46
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0025_automatedtask_continue_on_error'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='monthly_days_of_month',
|
||||
field=models.PositiveBigIntegerField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.2.11 on 2022-01-07 06:43
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0026_alter_automatedtask_monthly_days_of_month'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='daily_interval',
|
||||
field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(255)]),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='weekly_interval',
|
||||
field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(52)]),
|
||||
),
|
||||
]
|
||||
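The migration above adds range validators to daily_interval and weekly_interval. As a quick illustration of what they enforce (standard Django behavior, shown on the validators directly rather than on the model):

from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator

# daily_interval must fall in 1..255 once model or serializer validation runs
try:
    MinValueValidator(1)(0)
except ValidationError:
    print("0 is rejected")

MaxValueValidator(255)(255)  # no exception: the upper bound is inclusive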
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.11 on 2022-01-09 21:27
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0027_auto_20220107_0643'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='actions',
|
||||
field=models.JSONField(default=list),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.10 on 2022-01-10 01:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0028_alter_automatedtask_actions'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='task_type',
|
||||
field=models.CharField(choices=[('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('monthlydow', 'Monthly Day of Week'), ('checkfailure', 'On Check Failure'), ('manual', 'Manual'), ('runonce', 'Run Once'), ('scheduled', 'Scheduled')], default='manual', max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -3,36 +3,36 @@ import datetime as dt
|
||||
import random
|
||||
import string
|
||||
from typing import List
|
||||
from django.db.models.fields.json import JSONField
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from django.db.utils import DatabaseError
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.utils import timezone as djangotime
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
from logs.models import BaseAuditModel, DebugLog
|
||||
from tacticalrmm.models import PermissionQuerySet
|
||||
from packaging import version as pyver
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
(1, "Tuesday"),
|
||||
(2, "Wednesday"),
|
||||
(3, "Thursday"),
|
||||
(4, "Friday"),
|
||||
(5, "Saturday"),
|
||||
(6, "Sunday"),
|
||||
]
|
||||
from tacticalrmm.utils import (
|
||||
bitdays_to_string,
|
||||
bitmonthdays_to_string,
|
||||
bitmonths_to_string,
|
||||
bitweeks_to_string,
|
||||
convert_to_iso_duration,
|
||||
)
|
||||
|
||||
TASK_TYPE_CHOICES = [
|
||||
("scheduled", "Scheduled"),
|
||||
("daily", "Daily"),
|
||||
("weekly", "Weekly"),
|
||||
("monthly", "Monthly"),
|
||||
("monthlydow", "Monthly Day of Week"),
|
||||
("checkfailure", "On Check Failure"),
|
||||
("manual", "Manual"),
|
||||
("runonce", "Run Once"),
|
||||
("scheduled", "Scheduled"), # deprecated
|
||||
]
|
||||
|
||||
SYNC_STATUS_CHOICES = [
|
||||
@@ -50,6 +50,8 @@ TASK_STATUS_CHOICES = [
|
||||
|
||||
|
||||
class AutomatedTask(BaseAuditModel):
|
||||
objects = PermissionQuerySet.as_manager()
|
||||
|
||||
agent = models.ForeignKey(
|
||||
"agents.Agent",
|
||||
related_name="autotasks",
|
||||
@@ -71,6 +73,8 @@ class AutomatedTask(BaseAuditModel):
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
# deprecated
|
||||
script = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
null=True,
|
||||
@@ -78,12 +82,18 @@ class AutomatedTask(BaseAuditModel):
|
||||
related_name="autoscript",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
# deprecated
|
||||
script_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
# deprecated
|
||||
timeout = models.PositiveIntegerField(blank=True, default=120)
|
||||
|
||||
# format -> {"actions": [{"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": []}, {"type": "cmd", "command": "whoami", "timeout": 90}]}
|
||||
actions = JSONField(default=list)
|
||||
assigned_check = models.ForeignKey(
|
||||
"checks.Check",
|
||||
null=True,
|
||||
@@ -92,25 +102,9 @@ class AutomatedTask(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
name = models.CharField(max_length=255)
|
||||
run_time_bit_weekdays = models.IntegerField(null=True, blank=True)
|
||||
# run_time_days is deprecated, use bit weekdays
|
||||
run_time_days = ArrayField(
|
||||
models.IntegerField(choices=RUN_TIME_DAY_CHOICES, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
run_time_minute = models.CharField(max_length=5, null=True, blank=True)
|
||||
task_type = models.CharField(
|
||||
max_length=100, choices=TASK_TYPE_CHOICES, default="manual"
|
||||
)
|
||||
run_time_date = DateTimeField(null=True, blank=True)
|
||||
remove_if_not_scheduled = models.BooleanField(default=False)
|
||||
run_asap_after_missed = models.BooleanField(default=False) # added in agent v1.4.7
|
||||
collector_all_output = models.BooleanField(default=False)
|
||||
managed_by_policy = models.BooleanField(default=False)
|
||||
parent_task = models.PositiveIntegerField(null=True, blank=True)
|
||||
win_task_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
timeout = models.PositiveIntegerField(default=120)
|
||||
retvalue = models.TextField(null=True, blank=True)
|
||||
retcode = models.IntegerField(null=True, blank=True)
|
||||
stdout = models.TextField(null=True, blank=True)
|
||||
@@ -118,6 +112,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
execution_time = models.CharField(max_length=100, default="0.0000")
|
||||
last_run = models.DateTimeField(null=True, blank=True)
|
||||
enabled = models.BooleanField(default=True)
|
||||
continue_on_error = models.BooleanField(default=True)
|
||||
status = models.CharField(
|
||||
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
|
||||
)
|
||||
@@ -131,9 +126,80 @@ class AutomatedTask(BaseAuditModel):
|
||||
text_alert = models.BooleanField(default=False)
|
||||
dashboard_alert = models.BooleanField(default=False)
|
||||
|
||||
# options sent to agent for task creation
|
||||
# general task settings
|
||||
task_type = models.CharField(
|
||||
max_length=100, choices=TASK_TYPE_CHOICES, default="manual"
|
||||
)
|
||||
win_task_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
run_time_date = DateTimeField(null=True, blank=True)
|
||||
expire_date = DateTimeField(null=True, blank=True)
|
||||
|
||||
# daily
|
||||
daily_interval = models.PositiveSmallIntegerField(
|
||||
blank=True, null=True, validators=[MinValueValidator(1), MaxValueValidator(255)]
|
||||
)
|
||||
|
||||
# weekly
|
||||
run_time_bit_weekdays = models.IntegerField(null=True, blank=True)
|
||||
weekly_interval = models.PositiveSmallIntegerField(
|
||||
blank=True, null=True, validators=[MinValueValidator(1), MaxValueValidator(52)]
|
||||
)
|
||||
run_time_minute = models.CharField(
|
||||
max_length=5, null=True, blank=True
|
||||
) # deprecated
|
||||
|
||||
# monthly
|
||||
monthly_days_of_month = models.PositiveBigIntegerField(blank=True, null=True)
|
||||
monthly_months_of_year = models.PositiveIntegerField(blank=True, null=True)
|
||||
|
||||
# monthly days of week
|
||||
monthly_weeks_of_month = models.PositiveSmallIntegerField(blank=True, null=True)
|
||||
|
||||
# additional task settings
|
||||
task_repetition_duration = models.CharField(max_length=10, null=True, blank=True)
|
||||
task_repetition_interval = models.CharField(max_length=10, null=True, blank=True)
|
||||
stop_task_at_duration_end = models.BooleanField(blank=True, default=False)
|
||||
random_task_delay = models.CharField(max_length=10, null=True, blank=True)
|
||||
remove_if_not_scheduled = models.BooleanField(default=False)
|
||||
run_asap_after_missed = models.BooleanField(default=False) # added in agent v1.4.7
|
||||
task_instance_policy = models.PositiveSmallIntegerField(blank=True, default=1)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from autotasks.tasks import modify_win_task
|
||||
from automation.tasks import update_policy_autotasks_fields_task
|
||||
|
||||
# get the old task if it exists
|
||||
old_task = AutomatedTask.objects.get(pk=self.pk) if self.pk else None
|
||||
super(AutomatedTask, self).save(old_model=old_task, *args, **kwargs)
|
||||
|
||||
# check if fields were updated that require a sync to the agent
|
||||
update_agent = False
|
||||
if old_task:
|
||||
for field in self.fields_that_trigger_task_update_on_agent:
|
||||
if getattr(self, field) != getattr(old_task, field):
|
||||
update_agent = True
|
||||
break
|
||||
|
||||
# check if automated task was enabled/disabled and send celery task
|
||||
if old_task and old_task.agent and update_agent:
|
||||
modify_win_task.delay(pk=self.pk)
|
||||
|
||||
# if a policy task was edited, check whether the changed field should be copied to the rest of the agent tasks
|
||||
elif old_task and old_task.policy:
|
||||
if update_agent:
|
||||
update_policy_autotasks_fields_task.delay(
|
||||
task=self.pk, update_agent=update_agent
|
||||
)
|
||||
else:
|
||||
for field in self.policy_fields_to_copy:
|
||||
if getattr(self, field) != getattr(old_task, field):
|
||||
update_policy_autotasks_fields_task.delay(task=self.pk)
|
||||
break
|
||||
|
||||
@property
|
||||
def schedule(self):
|
||||
if self.task_type == "manual":
|
||||
@@ -142,13 +208,30 @@ class AutomatedTask(BaseAuditModel):
|
||||
return "Every time check fails"
|
||||
elif self.task_type == "runonce":
|
||||
return f'Run once on {self.run_time_date.strftime("%m/%d/%Y %I:%M%p")}'
|
||||
elif self.task_type == "scheduled":
|
||||
run_time_nice = dt.datetime.strptime(
|
||||
self.run_time_minute, "%H:%M"
|
||||
).strftime("%I:%M %p")
|
||||
|
||||
elif self.task_type == "daily":
|
||||
run_time_nice = self.run_time_date.strftime("%I:%M%p")
|
||||
if self.daily_interval == 1:
|
||||
return f"Daily at {run_time_nice}"
|
||||
else:
|
||||
return f"Every {self.daily_interval} days at {run_time_nice}"
|
||||
elif self.task_type == "weekly":
|
||||
run_time_nice = self.run_time_date.strftime("%I:%M%p")
|
||||
days = bitdays_to_string(self.run_time_bit_weekdays)
|
||||
return f"{days} at {run_time_nice}"
|
||||
if self.weekly_interval != 1:
|
||||
return f"{days} at {run_time_nice}"
|
||||
else:
|
||||
return f"{days} at {run_time_nice} every {self.weekly_interval} weeks"
|
||||
elif self.task_type == "monthly":
|
||||
run_time_nice = self.run_time_date.strftime("%I:%M%p")
|
||||
months = bitmonths_to_string(self.monthly_months_of_year)
|
||||
days = bitmonthdays_to_string(self.monthly_days_of_month)
|
||||
return f"Runs on {months} on days {days} at {run_time_nice}"
|
||||
elif self.task_type == "monthlydow":
|
||||
run_time_nice = self.run_time_date.strftime("%I:%M%p")
|
||||
months = bitmonths_to_string(self.monthly_months_of_year)
|
||||
weeks = bitweeks_to_string(self.monthly_weeks_of_month)
|
||||
days = bitdays_to_string(self.run_time_bit_weekdays)
|
||||
return f"Runs on {months} on {weeks} on {days} at {run_time_nice}"
|
||||
|
||||
@property
|
||||
def last_run_as_timezone(self):
|
||||
@@ -167,21 +250,53 @@ class AutomatedTask(BaseAuditModel):
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"dashboard_alert",
|
||||
"script",
|
||||
"script_args",
|
||||
"assigned_check",
|
||||
"name",
|
||||
"run_time_days",
|
||||
"run_time_minute",
|
||||
"actions",
|
||||
"run_time_bit_weekdays",
|
||||
"run_time_date",
|
||||
"expire_date",
|
||||
"daily_interval",
|
||||
"weekly_interval",
|
||||
"task_type",
|
||||
"win_task_name",
|
||||
"timeout",
|
||||
"enabled",
|
||||
"remove_if_not_scheduled",
|
||||
"run_asap_after_missed",
|
||||
"custom_field",
|
||||
"collector_all_output",
|
||||
"monthly_days_of_month",
|
||||
"monthly_months_of_year",
|
||||
"monthly_weeks_of_month",
|
||||
"task_repetition_duration",
|
||||
"task_repetition_interval",
|
||||
"stop_task_at_duration_end",
|
||||
"random_task_delay",
|
||||
"run_asap_after_missed",
|
||||
"task_instance_policy",
|
||||
"continue_on_error",
|
||||
]
|
||||
|
||||
@property
|
||||
def fields_that_trigger_task_update_on_agent(self) -> List[str]:
|
||||
return [
|
||||
"run_time_bit_weekdays",
|
||||
"run_time_date",
|
||||
"expire_date",
|
||||
"daily_interval",
|
||||
"weekly_interval",
|
||||
"enabled",
|
||||
"remove_if_not_scheduled",
|
||||
"run_asap_after_missed",
|
||||
"monthly_days_of_month",
|
||||
"monthly_months_of_year",
|
||||
"monthly_weeks_of_month",
|
||||
"task_repetition_duration",
|
||||
"task_repetition_interval",
|
||||
"stop_task_at_duration_end",
|
||||
"random_task_delay",
|
||||
"run_asap_after_missed",
|
||||
"task_instance_policy",
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
@@ -192,37 +307,31 @@ class AutomatedTask(BaseAuditModel):
|
||||
@staticmethod
|
||||
def serialize(task):
|
||||
# serializes the task and returns json
|
||||
from .serializers import TaskSerializer
|
||||
from .serializers import TaskAuditSerializer
|
||||
|
||||
return TaskSerializer(task).data
|
||||
return TaskAuditSerializer(task).data
|
||||
|
||||
def create_policy_task(self, agent=None, policy=None):
|
||||
def create_policy_task(self, agent=None, policy=None, assigned_check=None):
|
||||
|
||||
# added to allow new policy tasks to be assigned to check only when the agent check exists already
|
||||
if (
|
||||
self.assigned_check
|
||||
and agent
|
||||
and agent.agentchecks.filter(parent_check=self.assigned_check.id).exists()
|
||||
):
|
||||
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.id)
|
||||
|
||||
# if policy is present, then this task is being copied to another policy
|
||||
# if agent is present, then this task is being created on an agent from a policy
|
||||
# exit if neither are set or if both are set
|
||||
if not agent and not policy or agent and policy:
|
||||
# also exit if assigned_check is set because this task will be created when the check is
|
||||
if (
|
||||
(not agent and not policy)
|
||||
or (agent and policy)
|
||||
or (self.assigned_check and not assigned_check)
|
||||
):
|
||||
return
|
||||
|
||||
assigned_check = None
|
||||
|
||||
# get the correct assigned check for the task, if set
|
||||
if agent and self.assigned_check:
|
||||
# check if there is a matching check on the agent
|
||||
if agent.agentchecks.filter(parent_check=self.assigned_check.pk).exists():
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
parent_check=self.assigned_check.pk
|
||||
).first()
|
||||
elif policy and self.assigned_check:
|
||||
if policy.policychecks.filter(name=self.assigned_check.name).exists():
|
||||
assigned_check = policy.policychecks.filter(
|
||||
name=self.assigned_check.name
|
||||
).first()
|
||||
else:
|
||||
assigned_check = policy.policychecks.filter(
|
||||
check_type=self.assigned_check.check_type
|
||||
).first()
|
||||
|
||||
task = AutomatedTask.objects.create(
|
||||
agent=agent,
|
||||
policy=policy,
|
||||
@@ -232,11 +341,86 @@ class AutomatedTask(BaseAuditModel):
|
||||
)
|
||||
|
||||
for field in self.policy_fields_to_copy:
|
||||
setattr(task, field, getattr(self, field))
|
||||
if field != "assigned_check":
|
||||
setattr(task, field, getattr(self, field))
|
||||
|
||||
task.save()
|
||||
|
||||
task.create_task_on_agent()
|
||||
if agent:
|
||||
task.create_task_on_agent()
|
||||
|
||||
# agent version >= 1.8.0
|
||||
def generate_nats_task_payload(self, editing=False):
|
||||
task = {
|
||||
"pk": self.pk,
|
||||
"type": "rmm",
|
||||
"name": self.win_task_name,
|
||||
"overwrite_task": editing,
|
||||
"enabled": self.enabled,
|
||||
"trigger": self.task_type if self.task_type != "checkfailure" else "manual",
|
||||
"multiple_instances": self.task_instance_policy
|
||||
if self.task_instance_policy
|
||||
else 0,
|
||||
"delete_expired_task_after": self.remove_if_not_scheduled
|
||||
if self.expire_date
|
||||
else False,
|
||||
"start_when_available": self.run_asap_after_missed
|
||||
if self.task_type != "runonce"
|
||||
else True,
|
||||
}
|
||||
|
||||
if self.task_type in ["runonce", "daily", "weekly", "monthly", "monthlydow"]:
|
||||
|
||||
task["start_year"] = int(self.run_time_date.strftime("%Y"))
|
||||
task["start_month"] = int(self.run_time_date.strftime("%-m"))
|
||||
task["start_day"] = int(self.run_time_date.strftime("%-d"))
|
||||
task["start_hour"] = int(self.run_time_date.strftime("%-H"))
|
||||
task["start_min"] = int(self.run_time_date.strftime("%-M"))
|
||||
|
||||
if self.expire_date:
|
||||
task["expire_year"] = int(self.expire_date.strftime("%Y"))
|
||||
task["expire_month"] = int(self.expire_date.strftime("%-m"))
|
||||
task["expire_day"] = int(self.expire_date.strftime("%-d"))
|
||||
task["expire_hour"] = int(self.expire_date.strftime("%-H"))
|
||||
task["expire_min"] = int(self.expire_date.strftime("%-M"))
|
||||
|
||||
if self.random_task_delay:
|
||||
task["random_delay"] = convert_to_iso_duration(self.random_task_delay)
|
||||
|
||||
if self.task_repetition_interval:
|
||||
task["repetition_interval"] = convert_to_iso_duration(
|
||||
self.task_repetition_interval
|
||||
)
|
||||
task["repetition_duration"] = convert_to_iso_duration(
|
||||
self.task_repetition_duration
|
||||
)
|
||||
task["stop_at_duration_end"] = self.stop_task_at_duration_end
|
||||
|
||||
if self.task_type == "daily":
|
||||
task["day_interval"] = self.daily_interval
|
||||
|
||||
elif self.task_type == "weekly":
|
||||
task["week_interval"] = self.weekly_interval
|
||||
task["days_of_week"] = self.run_time_bit_weekdays
|
||||
|
||||
elif self.task_type == "monthly":
|
||||
|
||||
# check if "last day is configured"
|
||||
if self.monthly_days_of_month >= 0x80000000:
|
||||
task["days_of_month"] = self.monthly_days_of_month - 0x80000000
|
||||
task["run_on_last_day_of_month"] = True
|
||||
else:
|
||||
task["days_of_month"] = self.monthly_days_of_month
|
||||
task["run_on_last_day_of_month"] = False
|
||||
|
||||
task["months_of_year"] = self.monthly_months_of_year
|
||||
|
||||
elif self.task_type == "monthlydow":
|
||||
task["days_of_week"] = self.run_time_bit_weekdays
|
||||
task["months_of_year"] = self.monthly_months_of_year
|
||||
task["weeks_of_month"] = self.monthly_weeks_of_month
|
||||
|
||||
return task
|
||||
|
||||
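A worked example of the monthly days-of-month encoding handled in generate_nats_task_payload above. The bit-to-day mapping used here (bit n means day n+1) is an assumption for illustration; the 0x80000000 "last day of month" flag handling mirrors the code.

days_of_month = (1 << 0) | (1 << 14)        # assume bit 0 = day 1, bit 14 = day 15
with_last_day = days_of_month | 0x80000000  # set the "run on last day of month" flag

# mirrors the payload logic: detect the flag, then strip it before sending days_of_month
assert with_last_day >= 0x80000000
assert with_last_day - 0x80000000 == days_of_month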
def create_task_on_agent(self):
|
||||
from agents.models import Agent
|
||||
@@ -244,85 +428,100 @@ class AutomatedTask(BaseAuditModel):
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
.get()
|
||||
)
|
||||
|
||||
if self.task_type == "scheduled":
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.8.0"):
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": self.run_time_bit_weekdays,
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"hour": dt.datetime.strptime(self.run_time_minute, "%H:%M").hour,
|
||||
"min": dt.datetime.strptime(self.run_time_minute, "%H:%M").minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif self.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(agent.timezone)
|
||||
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
|
||||
pytz.utc
|
||||
)
|
||||
now = djangotime.now()
|
||||
if task_time_utc < now:
|
||||
self.run_time_date = now.astimezone(agent_tz).replace(
|
||||
tzinfo=pytz.utc
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
self.save(update_fields=["run_time_date"])
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"year": int(dt.datetime.strftime(self.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(self.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(self.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(self.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(self.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse(
|
||||
"1.4.7"
|
||||
):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
|
||||
if self.remove_if_not_scheduled:
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif self.task_type == "checkfailure" or self.task_type == "manual":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
},
|
||||
"schedtaskpayload": self.generate_nats_task_payload(),
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
if self.task_type == "scheduled":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": self.run_time_bit_weekdays,
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"hour": dt.datetime.strptime(
|
||||
self.run_time_minute, "%H:%M"
|
||||
).hour,
|
||||
"min": dt.datetime.strptime(
|
||||
self.run_time_minute, "%H:%M"
|
||||
).minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif self.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(agent.timezone)
|
||||
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
|
||||
pytz.utc
|
||||
)
|
||||
now = djangotime.now()
|
||||
if task_time_utc < now:
|
||||
self.run_time_date = now.astimezone(agent_tz).replace(
|
||||
tzinfo=pytz.utc
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
self.save(update_fields=["run_time_date"])
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"year": int(dt.datetime.strftime(self.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(self.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(self.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(self.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(self.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if self.run_asap_after_missed:
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
|
||||
if self.remove_if_not_scheduled:
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif self.task_type == "checkfailure" or self.task_type == "manual":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
},
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "initial"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in."
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in.",
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully created")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully created",
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -332,29 +531,41 @@ class AutomatedTask(BaseAuditModel):
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
.get()
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": self.win_task_name,
|
||||
"enabled": self.enabled,
|
||||
},
|
||||
}
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.8.0"):
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": self.generate_nats_task_payload(editing=True),
|
||||
}
|
||||
else:
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": self.win_task_name,
|
||||
"enabled": self.enabled,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "notsynced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin"
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to modify scheduled task {self.name} on {agent.hostname}({agent.pk}). It will try again on next agent checkin",
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully modified")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} was successfully modified",
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -364,7 +575,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
.get()
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
@@ -375,14 +586,25 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
self.sync_status = "pendingdeletion"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"{agent.hostname} task {self.name} was successfully modified"
|
||||
|
||||
try:
|
||||
self.save(update_fields=["sync_status"])
|
||||
except DatabaseError:
|
||||
pass
|
||||
|
||||
DebugLog.warning(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname} task {self.name} will be deleted on next checkin",
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
logger.info(f"{agent.hostname} task {self.name} was deleted")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"{agent.hostname}({agent.pk}) task {self.name} was deleted",
|
||||
)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -392,12 +614,23 @@ class AutomatedTask(BaseAuditModel):
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
.get()
|
||||
)
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
|
||||
return "ok"
|
||||
|
||||
def save_collector_results(self):
|
||||
|
||||
agent_field = self.custom_field.get_or_create_field_value(self.agent)
|
||||
|
||||
value = (
|
||||
self.stdout.strip()
|
||||
if self.collector_all_output
|
||||
else self.stdout.strip().split("\n")[-1].strip()
|
||||
)
|
||||
agent_field.save_to_field(value)
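A tiny illustration of the collector rule above: depending on collector_all_output, either the whole stdout or only its last line is written to the custom field (the stdout value here is made up):

# Illustration only; values are placeholders.
stdout = "checking drives...\nall volumes healthy\nC: 42% used"
whole_output = stdout.strip()                          # collector_all_output=True
last_line = stdout.strip().split("\n")[-1].strip()     # collector_all_output=False
assert last_line == "C: 42% used"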
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
self.dashboard_alert
|
||||
@@ -417,9 +650,9 @@ class AutomatedTask(BaseAuditModel):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
# Format of Email sent when Task has email alert
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
@@ -428,16 +661,15 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_sms(self):
|
||||
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
# Format of SMS sent when Task has SMS alert
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
@@ -446,7 +678,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_email(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -458,7 +690,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
|
||||
CORE.send_mail(subject, body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
def send_resolved_sms(self):
|
||||
from core.models import CoreSettings
|
||||
@@ -469,4 +701,4 @@ class AutomatedTask(BaseAuditModel):
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template)
|
||||
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
|
||||
|
||||
api/tacticalrmm/autotasks/permissions.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm, _has_perm_on_agent


class AutoTaskPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET":
            if "agent_id" in view.kwargs.keys():
                return _has_perm(r, "can_list_autotasks") and _has_perm_on_agent(
                    r.user, view.kwargs["agent_id"]
                )
            else:
                return _has_perm(r, "can_list_autotasks")
        else:
            return _has_perm(r, "can_manage_autotasks")


class RunAutoTaskPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_run_autotasks")
|
||||
@@ -1,19 +1,163 @@
from rest_framework import serializers

from agents.models import Agent
from checks.serializers import CheckSerializer
from scripts.models import Script
from scripts.serializers import ScriptCheckSerializer

from .models import AutomatedTask


class TaskSerializer(serializers.ModelSerializer):

    assigned_check = CheckSerializer(read_only=True)
    check_name = serializers.ReadOnlyField(source="assigned_check.readable_desc")
    schedule = serializers.ReadOnlyField()
    last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
    alert_template = serializers.SerializerMethodField()
    run_time_date = serializers.DateTimeField(format="iso-8601", required=False)
    expire_date = serializers.DateTimeField(
        format="iso-8601", allow_null=True, required=False
    )

    def validate_actions(self, value):

        if not value:
            raise serializers.ValidationError(
                f"There must be at least one action configured"
            )

        for action in value:
            if "type" not in action:
                raise serializers.ValidationError(
                    f"Each action must have a type field of either 'script' or 'cmd'"
                )

            if action["type"] == "script":
                if "script" not in action:
                    raise serializers.ValidationError(
                        f"A script action type must have a 'script' field with primary key of script"
                    )

                if "script_args" not in action:
                    raise serializers.ValidationError(
                        f"A script action type must have a 'script_args' field with an array of arguments"
                    )

                if "timeout" not in action:
                    raise serializers.ValidationError(
                        f"A script action type must have a 'timeout' field"
                    )

            if action["type"] == "cmd":
                if "command" not in action:
                    raise serializers.ValidationError(
                        f"A command action type must have a 'command' field"
                    )

                if "timeout" not in action:
                    raise serializers.ValidationError(
                        f"A command action type must have a 'timeout' field"
                    )

        return value
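For reference, a minimal actions list that would pass the validation above (the command text, script primary key 42, and arguments are placeholders; only the shape matters):

# Placeholder values; mirrors the shape the tests post later in this diff.
actions = [
    {"type": "cmd", "command": "ipconfig /flushdns", "timeout": 30},
    {"type": "script", "script": 42, "script_args": ["-Verbose"], "timeout": 90},
]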
|
||||
|
||||
def validate(self, data):
|
||||
|
||||
# allow editing with task_type not specified
|
||||
if self.instance and "task_type" not in data:
|
||||
|
||||
# remove schedule related fields from data
|
||||
if "run_time_date" in data:
|
||||
del data["run_time_date"]
|
||||
if "expire_date" in data:
|
||||
del data["expire_date"]
|
||||
if "daily_interval" in data:
|
||||
del data["daily_interval"]
|
||||
if "weekly_interval" in data:
|
||||
del data["weekly_interval"]
|
||||
if "run_time_bit_weekdays" in data:
|
||||
del data["run_time_bit_weekdays"]
|
||||
if "monthly_months_of_year" in data:
|
||||
del data["monthly_months_of_year"]
|
||||
if "monthly_days_of_month" in data:
|
||||
del data["monthly_days_of_month"]
|
||||
if "monthly_weeks_of_month" in data:
|
||||
del data["monthly_weeks_of_month"]
|
||||
if "assigned_check" in data:
|
||||
del data["assigned_check"]
|
||||
return data
|
||||
|
||||
# run_time_date required
|
||||
if (
|
||||
data["task_type"] in ["runonce", "daily", "weekly", "monthly", "monthlydow"]
|
||||
and not data["run_time_date"]
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
f"run_time_date is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
# daily task type validation
|
||||
if data["task_type"] == "daily":
|
||||
if "daily_interval" not in data or not data["daily_interval"]:
|
||||
raise serializers.ValidationError(
|
||||
f"daily_interval is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
# weekly task type validation
|
||||
elif data["task_type"] == "weekly":
|
||||
if "weekly_interval" not in data or not data["weekly_interval"]:
|
||||
raise serializers.ValidationError(
|
||||
f"weekly_interval is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
if "run_time_bit_weekdays" not in data or not data["run_time_bit_weekdays"]:
|
||||
raise serializers.ValidationError(
|
||||
f"run_time_bit_weekdays is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
# monthly task type validation
|
||||
elif data["task_type"] == "monthly":
|
||||
if (
|
||||
"monthly_months_of_year" not in data
|
||||
or not data["monthly_months_of_year"]
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
f"monthly_months_of_year is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
if "monthly_days_of_month" not in data or not data["monthly_days_of_month"]:
|
||||
raise serializers.ValidationError(
|
||||
f"monthly_days_of_month is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
# monthly day of week task type validation
|
||||
elif data["task_type"] == "monthlydow":
|
||||
if (
|
||||
"monthly_months_of_year" not in data
|
||||
or not data["monthly_months_of_year"]
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
f"monthly_months_of_year is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
if (
|
||||
"monthly_weeks_of_month" not in data
|
||||
or not data["monthly_weeks_of_month"]
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
f"monthly_weeks_of_month is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
if "run_time_bit_weekdays" not in data or not data["run_time_bit_weekdays"]:
|
||||
raise serializers.ValidationError(
|
||||
f"run_time_bit_weekdays is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
# check failure task type validation
|
||||
elif data["task_type"] == "checkfailure":
|
||||
if "assigned_check" not in data or not data["assigned_check"]:
|
||||
raise serializers.ValidationError(
|
||||
f"assigned_check is required for task_type '{data['task_type']}'"
|
||||
)
|
||||
|
||||
return data
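Several of these checks guard bitmask fields (run_time_bit_weekdays, monthly_months_of_year, monthly_weeks_of_month). A minimal sketch of how weekday names map to such a bitmask; the bit order here is an assumption, and the project's real converter is tacticalrmm.utils.get_bit_days:

# Hedged sketch; bit 0 = Monday ... bit 6 = Sunday is an assumption.
WEEKDAYS = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]

def weekday_bitmask(days: list[str]) -> int:
    mask = 0
    for day in days:
        mask |= 1 << WEEKDAYS.index(day)
    return mask

assert weekday_bitmask(["Monday", "Wednesday"]) == 0b101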
|
||||
|
||||
def get_alert_template(self, obj):
|
||||
|
||||
@@ -37,44 +181,55 @@ class TaskSerializer(serializers.ModelSerializer):
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class AutoTaskSerializer(serializers.ModelSerializer):
|
||||
|
||||
autotasks = TaskSerializer(many=True, read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = (
|
||||
"pk",
|
||||
"hostname",
|
||||
"autotasks",
|
||||
)
|
||||
|
||||
|
||||
# below is for the windows agent
|
||||
class TaskRunnerScriptField(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Script
|
||||
fields = ["id", "filepath", "filename", "shell", "script_type"]
|
||||
|
||||
|
||||
class TaskRunnerGetSerializer(serializers.ModelSerializer):
|
||||
|
||||
script = TaskRunnerScriptField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = ["id", "script", "timeout", "enabled", "script_args"]
|
||||
|
||||
|
||||
class TaskGOGetSerializer(serializers.ModelSerializer):
|
||||
script = ScriptCheckSerializer(read_only=True)
|
||||
task_actions = serializers.SerializerMethodField()
|
||||
|
||||
def get_task_actions(self, obj):
|
||||
tmp = []
|
||||
for action in obj.actions:
|
||||
if action["type"] == "cmd":
|
||||
tmp.append(
|
||||
{
|
||||
"type": "cmd",
|
||||
"command": Script.parse_script_args(
|
||||
agent=obj.agent,
|
||||
shell=action["shell"],
|
||||
args=[action["command"]],
|
||||
)[0],
|
||||
"shell": action["shell"],
|
||||
"timeout": action["timeout"],
|
||||
}
|
||||
)
|
||||
elif action["type"] == "script":
|
||||
script = Script.objects.get(pk=action["script"])
|
||||
tmp.append(
|
||||
{
|
||||
"type": "script",
|
||||
"script_name": script.name,
|
||||
"code": script.code,
|
||||
"script_args": Script.parse_script_args(
|
||||
agent=obj.agent,
|
||||
shell=script.shell,
|
||||
args=action["script_args"],
|
||||
),
|
||||
"shell": script.shell,
|
||||
"timeout": action["timeout"],
|
||||
}
|
||||
)
|
||||
return tmp
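Illustrative output of get_task_actions() for the Go agent; every value below is a placeholder, only the structure reflects the serializer above:

# Example shape only.
task_actions = [
    {"type": "cmd", "command": "ipconfig /flushdns", "shell": "cmd", "timeout": 30},
    {
        "type": "script",
        "script_name": "Clear Temp",
        "code": "Remove-Item $env:TEMP\\* -Recurse -ErrorAction SilentlyContinue",
        "script_args": [],
        "shell": "powershell",
        "timeout": 90,
    },
]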
|
||||
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = ["id", "script", "timeout", "enabled", "script_args"]
|
||||
fields = ["id", "continue_on_error", "enabled", "task_actions"]
|
||||
|
||||
|
||||
class TaskRunnerPatchSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TaskAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = "__all__"
|
||||
|
||||
@@ -4,14 +4,11 @@ import random
from time import sleep
from typing import Union

from django.conf import settings
from django.utils import timezone as djangotime
from loguru import logger
from tacticalrmm.celery import app

from autotasks.models import AutomatedTask

logger.configure(**settings.LOG_CONFIG)
from logs.models import DebugLog
from tacticalrmm.celery import app


@app.task
@@ -24,7 +21,7 @@ def create_win_task_schedule(pk):


@app.task
def enable_or_disable_win_task(pk):
def modify_win_task(pk):
    task = AutomatedTask.objects.get(pk=pk)

    task.modify_task_on_agent()
|
||||
@@ -53,12 +50,20 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
|
||||
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup initiated on {agent.hostname}.",
|
||||
)
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
|
||||
|
||||
if not isinstance(r, list) and not r: # empty list
|
||||
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up scheduled tasks on {agent.hostname}: {r}",
|
||||
)
|
||||
return "notlist"
|
||||
|
||||
agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True))
|
||||
@@ -83,13 +88,23 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
}
|
||||
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if ret != "ok":
|
||||
logger.error(
|
||||
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
|
||||
DebugLog.error(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
|
||||
)
|
||||
else:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Removed orphaned task {task} from {agent.hostname}",
|
||||
)
|
||||
|
||||
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
||||
DebugLog.info(
|
||||
agent=agent,
|
||||
log_type="agent_issues",
|
||||
message=f"Orphaned task cleanup finished on {agent.hostname}",
|
||||
)
|
||||
|
||||
|
||||
@app.task
|
||||
|
||||
@@ -7,83 +7,244 @@ from model_bakery import baker
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import AutomatedTask
|
||||
from .serializers import AutoTaskSerializer
|
||||
from .serializers import TaskSerializer
|
||||
from .tasks import create_win_task_schedule, remove_orphaned_win_tasks, run_win_task
|
||||
|
||||
base_url = "/tasks"
|
||||
|
||||
|
||||
class TestAutotaskViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_get_autotasks(self):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make("autotasks.AutomatedTask", agent=agent, _quantity=3)
|
||||
policy = baker.make("automation.Policy")
|
||||
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=4)
|
||||
baker.make("autotasks.AutomatedTask", _quantity=7)
|
||||
|
||||
# test returning all tasks
|
||||
url = f"{base_url}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 14)
|
||||
|
||||
# test returning tasks for a specific agent
|
||||
url = f"/agents/{agent.agent_id}/tasks/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 3)
|
||||
|
||||
# test returning tasks for a specific policy
|
||||
url = f"/automation/policies/{policy.id}/tasks/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 4)
|
||||
|
||||
@patch("automation.tasks.generate_agent_autotasks_task.delay")
|
||||
@patch("autotasks.tasks.create_win_task_schedule.delay")
|
||||
def test_add_autotask(
|
||||
self, create_win_task_schedule, generate_agent_autotasks_task
|
||||
):
|
||||
url = "/tasks/automatedtasks/"
|
||||
url = f"{base_url}/"
|
||||
|
||||
# setup data
|
||||
script = baker.make_recipe("scripts.script")
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
# test script set to invalid pk
|
||||
data = {"autotask": {"script": 500}}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test invalid policy
|
||||
data = {"autotask": {"script": script.id}, "policy": 500}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
actions = [
|
||||
{"type": "cmd", "command": "command", "timeout": 30},
|
||||
{"type": "script", "script": script.id, "script_args": [], "timeout": 90},
|
||||
]
|
||||
|
||||
# test invalid agent
|
||||
data = {
|
||||
"autotask": {"script": script.id},
|
||||
"agent": 500,
|
||||
"agent": "13kfs89as9d89asd8f98df8df8dfhdf",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test add task to agent
|
||||
# test add task without actions
|
||||
data = {
|
||||
"autotask": {
|
||||
"name": "Test Task Scheduled with Assigned Check",
|
||||
"run_time_days": ["Sunday", "Monday", "Friday"],
|
||||
"run_time_minute": "10:00",
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
"script": script.id,
|
||||
"script_args": None,
|
||||
"task_type": "scheduled",
|
||||
"assigned_check": check.id,
|
||||
},
|
||||
"agent": agent.id,
|
||||
"agent": agent.agent_id,
|
||||
"name": "Test Task Scheduled with Assigned Check",
|
||||
"run_time_days": 56,
|
||||
"enabled": True,
|
||||
"actions": [],
|
||||
"task_type": "manual",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test add checkfailure task_type to agent without check
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Check Failure",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "checkfailure",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
create_win_task_schedule.assert_not_called()
|
||||
|
||||
# test add manual task_type to agent
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Manual",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "manual",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
create_win_task_schedule.assert_called()
|
||||
create_win_task_schedule.reset_mock()
|
||||
|
||||
# test add daily task_type to agent
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Daily",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "daily",
|
||||
"daily_interval": 1,
|
||||
"run_time_date": djangotime.now(),
|
||||
"repetition_interval": "30M",
|
||||
"repetition_duration": "1D",
|
||||
"random_task_delay": "5M",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# test add weekly task_type to agent
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Weekly",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "weekly",
|
||||
"weekly_interval": 2,
|
||||
"run_time_bit_weekdays": 26,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
"task_instance_policy": 2,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
create_win_task_schedule.assert_called()
|
||||
create_win_task_schedule.reset_mock()
|
||||
|
||||
# test add monthly task_type to agent
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Monthly",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "monthly",
|
||||
"monthly_months_of_year": 56,
|
||||
"monthly_days_of_month": 350,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
create_win_task_schedule.assert_called()
|
||||
create_win_task_schedule.reset_mock()
|
||||
|
||||
# test add monthly day-of-week task_type to agent
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Monthly",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "monthlydow",
|
||||
"monthly_months_of_year": 500,
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
create_win_task_schedule.assert_called()
|
||||
create_win_task_schedule.reset_mock()
|
||||
|
||||
# test add monthly day-of-week task_type to agent with custom field
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Monthly",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "monthlydow",
|
||||
"monthly_months_of_year": 500,
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
"custom_field": custom_field.id,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
create_win_task_schedule.assert_called()
|
||||
create_win_task_schedule.reset_mock()
|
||||
|
||||
# test add checkfailure task_type to agent
|
||||
data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Check Failure",
|
||||
"enabled": True,
|
||||
"actions": actions,
|
||||
"task_type": "checkfailure",
|
||||
"assigned_check": check.id,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
create_win_task_schedule.assert_called()
|
||||
create_win_task_schedule.reset_mock()
|
||||
|
||||
# test add task to policy
|
||||
data = {
|
||||
"autotask": {
|
||||
"name": "Test Task Manual",
|
||||
"run_time_days": [],
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
"script": script.id,
|
||||
"script_args": None,
|
||||
"task_type": "manual",
|
||||
"assigned_check": None,
|
||||
},
|
||||
"policy": policy.id, # type: ignore
|
||||
"name": "Test Task Manual",
|
||||
"enabled": True,
|
||||
"task_type": "manual",
|
||||
"actions": actions,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
@@ -97,54 +258,120 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
baker.make("autotasks.AutomatedTask", agent=agent, _quantity=3)
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
|
||||
url = f"/tasks/{agent.id}/automatedtasks/"
|
||||
url = f"{base_url}/{task.id}/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AutoTaskSerializer(agent)
|
||||
serializer = TaskSerializer(task)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("autotasks.tasks.enable_or_disable_win_task.delay")
|
||||
@patch("autotasks.tasks.modify_win_task.delay")
|
||||
@patch("automation.tasks.update_policy_autotasks_fields_task.delay")
|
||||
def test_update_autotask(
|
||||
self, update_policy_autotasks_fields_task, enable_or_disable_win_task
|
||||
self, update_policy_autotasks_fields_task, modify_win_task
|
||||
):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
policy = baker.make("automation.Policy")
|
||||
policy_task = baker.make("autotasks.AutomatedTask", policy=policy)
|
||||
policy_task = baker.make("autotasks.AutomatedTask", enabled=True, policy=policy)
|
||||
custom_field = baker.make("core.CustomField")
|
||||
script = baker.make("scripts.Script")
|
||||
|
||||
actions = [
|
||||
{"type": "cmd", "command": "command", "timeout": 30},
|
||||
{"type": "script", "script": script.id, "script_args": [], "timeout": 90},
|
||||
]
|
||||
|
||||
# test invalid url
|
||||
resp = self.client.patch("/tasks/500/automatedtasks/", format="json")
|
||||
resp = self.client.put(f"{base_url}/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/" # type: ignore
|
||||
url = f"{base_url}/{agent_task.id}/" # type: ignore
|
||||
|
||||
# test editing agent task
|
||||
data = {"enableordisable": False}
|
||||
# test editing agent task with no task update
|
||||
data = {"name": "New Name"}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
enable_or_disable_win_task.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
modify_win_task.assert_not_called()  # type: ignore
|
||||
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/" # type: ignore
|
||||
# test editing agent task with agent task update
|
||||
data = {"enabled": False}
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
modify_win_task.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
modify_win_task.reset_mock()
|
||||
|
||||
# test editing agent task with task_type
|
||||
data = {
|
||||
"name": "Monthly",
|
||||
"actions": actions,
|
||||
"task_type": "monthlydow",
|
||||
"monthly_months_of_year": 500,
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
"custom_field": custom_field.id,
|
||||
"run_asap_afteR_missed": False,
|
||||
}
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
modify_win_task.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
modify_win_task.reset_mock()
|
||||
|
||||
# test trying to edit with empty actions
|
||||
data = {
|
||||
"name": "Monthly",
|
||||
"actions": [],
|
||||
"task_type": "monthlydow",
|
||||
"monthly_months_of_year": 500,
|
||||
"monthly_weeks_of_month": 4,
|
||||
"run_time_bit_weekdays": 15,
|
||||
"run_time_date": djangotime.now(),
|
||||
"expire_date": djangotime.now(),
|
||||
"repetition_interval": "30S",
|
||||
"repetition_duration": "1H",
|
||||
"random_task_delay": "5M",
|
||||
"run_asap_afteR_missed": False,
|
||||
}
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
modify_win_task.assert_not_called()  # type: ignore
|
||||
|
||||
# test editing policy tasks
|
||||
url = f"{base_url}/{policy_task.id}/" # type: ignore
|
||||
|
||||
# test editing policy task
|
||||
data = {"enableordisable": True}
|
||||
data = {"enabled": False}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
update_policy_autotasks_fields_task.assert_called_with(
|
||||
task=policy_task.id, update_agent=True # type: ignore
|
||||
)
|
||||
update_policy_autotasks_fields_task.reset_mock()
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
# test editing policy task with no agent update
|
||||
data = {"name": "New Name"}
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
update_policy_autotasks_fields_task.assert_called_with(task=policy_task.id)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||
@patch("automation.tasks.delete_policy_autotasks_task.delay")
|
||||
@@ -158,19 +385,20 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
policy_task = baker.make("autotasks.AutomatedTask", policy=policy)
|
||||
|
||||
# test invalid url
|
||||
resp = self.client.delete("/tasks/500/automatedtasks/", format="json")
|
||||
resp = self.client.delete(f"{base_url}/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete agent task
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/" # type: ignore
|
||||
url = f"{base_url}/{agent_task.id}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
delete_win_task_schedule.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
|
||||
# test delete policy task
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/" # type: ignore
|
||||
url = f"{base_url}/{policy_task.id}/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertFalse(AutomatedTask.objects.filter(pk=policy_task.id)) # type: ignore
|
||||
delete_policy_autotasks_task.assert_called_with(task=policy_task.id) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
@@ -182,16 +410,16 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
|
||||
# test invalid url
|
||||
resp = self.client.get("/tasks/runwintask/500/", format="json")
|
||||
resp = self.client.post(f"{base_url}/500/run/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test run agent task
|
||||
url = f"/tasks/runwintask/{task.id}/" # type: ignore
|
||||
resp = self.client.get(url, format="json")
|
||||
url = f"{base_url}/{task.id}/run/" # type: ignore
|
||||
resp = self.client.post(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
run_win_task.assert_called()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
|
||||
class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
@@ -409,3 +637,227 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
timeout=5,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
|
||||
class TestTaskPermissions(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.client_setup()
|
||||
|
||||
def test_get_tasks_permissions(self):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent, _quantity=5)
|
||||
unauthorized_task = baker.make(
|
||||
"autotasks.AutomatedTask", agent=unauthorized_agent, _quantity=7
|
||||
)
|
||||
|
||||
policy_tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=2)
|
||||
|
||||
# test super user access
|
||||
self.check_authorized_superuser("get", f"{base_url}/")
|
||||
self.check_authorized_superuser("get", f"/agents/{agent.agent_id}/tasks/")
|
||||
self.check_authorized_superuser(
|
||||
"get", f"/agents/{unauthorized_agent.agent_id}/tasks/"
|
||||
)
|
||||
self.check_authorized_superuser(
|
||||
"get", f"/automation/policies/{policy.id}/tasks/"
|
||||
)
|
||||
|
||||
user = self.create_user_with_roles([])
|
||||
self.client.force_authenticate(user=user)
|
||||
|
||||
self.check_not_authorized("get", f"{base_url}/")
|
||||
self.check_not_authorized("get", f"/agents/{agent.agent_id}/tasks/")
|
||||
self.check_not_authorized(
|
||||
"get", f"/agents/{unauthorized_agent.agent_id}/tasks/"
|
||||
)
|
||||
self.check_not_authorized("get", f"/automation/policies/{policy.id}/tasks/")
|
||||
|
||||
# add autotask list role to user
|
||||
user.role.can_list_autotasks = True
|
||||
user.role.save()
|
||||
|
||||
r = self.check_authorized("get", f"{base_url}/")
|
||||
self.assertEqual(len(r.data), 14)
|
||||
r = self.check_authorized("get", f"/agents/{agent.agent_id}/tasks/")
|
||||
self.assertEqual(len(r.data), 5)
|
||||
r = self.check_authorized(
|
||||
"get", f"/agents/{unauthorized_agent.agent_id}/tasks/"
|
||||
)
|
||||
self.assertEqual(len(r.data), 7)
|
||||
r = self.check_authorized("get", f"/automation/policies/{policy.id}/tasks/")
|
||||
self.assertEqual(len(r.data), 2)
|
||||
|
||||
# test limiting to client
|
||||
user.role.can_view_clients.set([agent.client])
|
||||
self.check_not_authorized(
|
||||
"get", f"/agents/{unauthorized_agent.agent_id}/tasks/"
|
||||
)
|
||||
self.check_authorized("get", f"/agents/{agent.agent_id}/tasks/")
|
||||
self.check_authorized("get", f"/automation/policies/{policy.id}/tasks/")
|
||||
|
||||
# make sure queryset is limited too
|
||||
r = self.client.get(f"{base_url}/")
|
||||
self.assertEqual(len(r.data), 7)
|
||||
|
||||
def test_add_task_permissions(self):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
script = baker.make("scripts.Script")
|
||||
|
||||
policy_data = {
|
||||
"policy": policy.id, # type: ignore
|
||||
"name": "Test Task Manual",
|
||||
"run_time_days": [],
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
"script": script.id,
|
||||
"script_args": [],
|
||||
"task_type": "manual",
|
||||
"assigned_check": None,
|
||||
}
|
||||
|
||||
agent_data = {
|
||||
"agent": agent.agent_id,
|
||||
"name": "Test Task Manual",
|
||||
"run_time_days": [],
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
"script": script.id,
|
||||
"script_args": [],
|
||||
"task_type": "manual",
|
||||
"assigned_check": None,
|
||||
}
|
||||
|
||||
unauthorized_agent_data = {
|
||||
"agent": unauthorized_agent.agent_id,
|
||||
"name": "Test Task Manual",
|
||||
"run_time_days": [],
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
"script": script.id,
|
||||
"script_args": [],
|
||||
"task_type": "manual",
|
||||
"assigned_check": None,
|
||||
}
|
||||
|
||||
url = f"{base_url}/"
|
||||
|
||||
for data in [policy_data, agent_data]:
|
||||
# test superuser access
|
||||
self.check_authorized_superuser("post", url, data)
|
||||
|
||||
user = self.create_user_with_roles([])
|
||||
self.client.force_authenticate(user=user)
|
||||
|
||||
# test user without role
|
||||
self.check_not_authorized("post", url, data)
|
||||
|
||||
# add user to role and test
|
||||
setattr(user.role, "can_manage_autotasks", True)
|
||||
user.role.save()
|
||||
|
||||
self.check_authorized("post", url, data)
|
||||
|
||||
# limit user to client
|
||||
user.role.can_view_clients.set([agent.client])
|
||||
if "agent" in data.keys():
|
||||
self.check_authorized("post", url, data)
|
||||
self.check_not_authorized("post", url, unauthorized_agent_data)
|
||||
else:
|
||||
self.check_authorized("post", url, data)
|
||||
|
||||
# mock the task delete method so it actually isn't deleted
|
||||
@patch("autotasks.models.AutomatedTask.delete")
|
||||
def test_task_get_edit_delete_permissions(self, delete_task):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
unauthorized_task = baker.make(
|
||||
"autotasks.AutomatedTask", agent=unauthorized_agent
|
||||
)
|
||||
policy_task = baker.make("autotasks.AutomatedTask", policy=policy)
|
||||
|
||||
for method in ["get", "put", "delete"]:
|
||||
|
||||
url = f"{base_url}/{task.id}/"
|
||||
unauthorized_url = f"{base_url}/{unauthorized_task.id}/"
|
||||
policy_url = f"{base_url}/{policy_task.id}/"
|
||||
|
||||
# test superuser access
|
||||
self.check_authorized_superuser(method, url)
|
||||
self.check_authorized_superuser(method, unauthorized_url)
|
||||
self.check_authorized_superuser(method, policy_url)
|
||||
|
||||
user = self.create_user_with_roles([])
|
||||
self.client.force_authenticate(user=user)
|
||||
|
||||
# test user without role
|
||||
self.check_not_authorized(method, url)
|
||||
self.check_not_authorized(method, unauthorized_url)
|
||||
self.check_not_authorized(method, policy_url)
|
||||
|
||||
# add user to role and test
|
||||
setattr(
|
||||
user.role,
|
||||
"can_list_autotasks" if method == "get" else "can_manage_autotasks",
|
||||
True,
|
||||
)
|
||||
user.role.save()
|
||||
|
||||
self.check_authorized(method, url)
|
||||
self.check_authorized(method, unauthorized_url)
|
||||
self.check_authorized(method, policy_url)
|
||||
|
||||
# limit user to client if agent task
|
||||
user.role.can_view_clients.set([agent.client])
|
||||
|
||||
self.check_authorized(method, url)
|
||||
self.check_not_authorized(method, unauthorized_url)
|
||||
self.check_authorized(method, policy_url)
|
||||
|
||||
def test_task_action_permissions(self):
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
unauthorized_agent = baker.make_recipe("agents.agent")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
unauthorized_task = baker.make(
|
||||
"autotasks.AutomatedTask", agent=unauthorized_agent
|
||||
)
|
||||
|
||||
url = f"{base_url}/{task.id}/run/"
|
||||
unauthorized_url = f"{base_url}/{unauthorized_task.id}/run/"
|
||||
|
||||
# test superuser access
|
||||
self.check_authorized_superuser("post", url)
|
||||
self.check_authorized_superuser("post", unauthorized_url)
|
||||
|
||||
user = self.create_user_with_roles([])
|
||||
self.client.force_authenticate(user=user)
|
||||
|
||||
# test user without role
|
||||
self.check_not_authorized("post", url)
|
||||
self.check_not_authorized("post", unauthorized_url)
|
||||
|
||||
# add user to role and test
|
||||
user.role.can_run_autotasks = True
|
||||
user.role.save()
|
||||
|
||||
self.check_authorized("post", url)
|
||||
self.check_authorized("post", unauthorized_url)
|
||||
|
||||
# limit user to client if agent task
|
||||
user.role.can_view_sites.set([agent.site])
|
||||
|
||||
self.check_authorized("post", url)
|
||||
self.check_not_authorized("post", unauthorized_url)
|
||||
|
||||
def test_policy_fields_to_copy_exists(self):
|
||||
fields = [i.name for i in AutomatedTask._meta.get_fields()]
|
||||
task = baker.make("autotasks.AutomatedTask")
|
||||
for i in task.policy_fields_to_copy: # type: ignore
|
||||
self.assertIn(i, fields)
|
||||
|
||||
@@ -3,7 +3,7 @@ from django.urls import path
from . import views

urlpatterns = [
    path("<int:pk>/automatedtasks/", views.AutoTask.as_view()),
    path("automatedtasks/", views.AddAutoTask.as_view()),
    path("runwintask/<int:pk>/", views.run_task),
    path("", views.GetAddAutoTasks.as_view()),
    path("<int:pk>/", views.GetEditDeleteAutoTask.as_view()),
    path("<int:pk>/run/", views.RunAutoTask.as_view()),
]
|
||||
|
||||
@@ -1,52 +1,52 @@
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.utils import get_bit_days, get_default_timezone, notify_error
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from tacticalrmm.permissions import _has_perm_on_agent
|
||||
|
||||
from .models import AutomatedTask
|
||||
from .serializers import AutoTaskSerializer, TaskSerializer
|
||||
from .permissions import AutoTaskPerms, RunAutoTaskPerms
|
||||
from .serializers import TaskSerializer
|
||||
|
||||
|
||||
class AddAutoTask(APIView):
|
||||
class GetAddAutoTasks(APIView):
|
||||
permission_classes = [IsAuthenticated, AutoTaskPerms]
|
||||
|
||||
def get(self, request, agent_id=None, policy=None):
|
||||
|
||||
if agent_id:
|
||||
agent = get_object_or_404(Agent, agent_id=agent_id)
|
||||
tasks = AutomatedTask.objects.filter(agent=agent)
|
||||
elif policy:
|
||||
policy = get_object_or_404(Policy, id=policy)
|
||||
tasks = AutomatedTask.objects.filter(policy=policy)
|
||||
else:
|
||||
tasks = AutomatedTask.objects.filter_by_role(request.user)
|
||||
return Response(TaskSerializer(tasks, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
from automation.models import Policy
|
||||
from automation.tasks import generate_agent_autotasks_task
|
||||
|
||||
from autotasks.tasks import create_win_task_schedule
|
||||
|
||||
data = request.data
|
||||
script = get_object_or_404(Script, pk=data["autotask"]["script"])
|
||||
data = request.data.copy()
|
||||
|
||||
# Determine if adding check to Policy or Agent
|
||||
if "policy" in data:
|
||||
policy = get_object_or_404(Policy, id=data["policy"])
|
||||
# Object used for filter and save
|
||||
parent = {"policy": policy}
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=data["agent"])
|
||||
parent = {"agent": agent}
|
||||
# Determine if adding to an agent and replace agent_id with pk
|
||||
if "agent" in data.keys():
|
||||
agent = get_object_or_404(Agent, agent_id=data["agent"])
|
||||
|
||||
check = None
|
||||
if data["autotask"]["assigned_check"]:
|
||||
check = get_object_or_404(Check, pk=data["autotask"]["assigned_check"])
|
||||
if not _has_perm_on_agent(request.user, agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
bit_weekdays = None
|
||||
if data["autotask"]["run_time_days"]:
|
||||
bit_weekdays = get_bit_days(data["autotask"]["run_time_days"])
|
||||
data["agent"] = agent.pk
|
||||
|
||||
del data["autotask"]["run_time_days"]
|
||||
serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
|
||||
serializer = TaskSerializer(data=data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
task = serializer.save(
|
||||
**parent,
|
||||
script=script,
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
assigned_check=check,
|
||||
run_time_bit_weekdays=bit_weekdays,
|
||||
)
|
||||
|
||||
if task.agent:
|
||||
@@ -55,56 +55,35 @@ class AddAutoTask(APIView):
|
||||
elif task.policy:
|
||||
generate_agent_autotasks_task.delay(policy=task.policy.pk)
|
||||
|
||||
return Response("Task will be created shortly!")
|
||||
return Response(
|
||||
"The task has been created. It will show up on the agent on next checkin"
|
||||
)
|
||||
|
||||
|
||||
class AutoTask(APIView):
|
||||
class GetEditDeleteAutoTask(APIView):
|
||||
permission_classes = [IsAuthenticated, AutoTaskPerms]
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
ctx = {
|
||||
"default_tz": get_default_timezone(),
|
||||
"agent_tz": agent.time_zone,
|
||||
}
|
||||
return Response(AutoTaskSerializer(agent, context=ctx).data)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
return Response(TaskSerializer(task).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
from automation.tasks import update_policy_autotasks_fields_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
serializer = TaskSerializer(instance=task, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
if task.policy:
|
||||
update_policy_autotasks_fields_task.delay(task=task.pk)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def patch(self, request, pk):
|
||||
from automation.tasks import update_policy_autotasks_fields_task
|
||||
from autotasks.tasks import enable_or_disable_win_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if "enableordisable" in request.data:
|
||||
action = request.data["enableordisable"]
|
||||
task.enabled = action
|
||||
task.save(update_fields=["enabled"])
|
||||
action = "enabled" if action else "disabled"
|
||||
|
||||
if task.policy:
|
||||
update_policy_autotasks_fields_task.delay(
|
||||
task=task.pk, update_agent=True
|
||||
)
|
||||
elif task.agent:
|
||||
enable_or_disable_win_task.delay(pk=task.pk)
|
||||
|
||||
return Response(f"Task will be {action} shortly")
|
||||
|
||||
else:
|
||||
return notify_error("The request was invalid")
|
||||
return Response("The task was updated")
|
||||
|
||||
def delete(self, request, pk):
|
||||
from automation.tasks import delete_policy_autotasks_task
|
||||
@@ -112,18 +91,28 @@ class AutoTask(APIView):
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
if task.agent:
|
||||
delete_win_task_schedule.delay(pk=task.pk)
|
||||
elif task.policy:
|
||||
delete_policy_autotasks_task.delay(task=task.pk)
|
||||
task.delete()
|
||||
|
||||
return Response(f"{task.name} will be deleted shortly")
|
||||
|
||||
|
||||
@api_view()
|
||||
def run_task(request, pk):
|
||||
from autotasks.tasks import run_win_task
|
||||
class RunAutoTask(APIView):
|
||||
permission_classes = [IsAuthenticated, RunAutoTaskPerms]
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
run_win_task.delay(pk=pk)
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
|
||||
def post(self, request, pk):
|
||||
from autotasks.tasks import run_win_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id):
|
||||
raise PermissionDenied()
|
||||
|
||||
run_win_task.delay(pk=pk)
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
|
||||
|
||||
api/tacticalrmm/checks/migrations/0024_auto_20210606_1632.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# Generated by Django 3.2.1 on 2021-06-06 16:32

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('checks', '0023_check_run_interval'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='checkhistory',
            name='check_history',
        ),
        migrations.AddField(
            model_name='checkhistory',
            name='check_id',
            field=models.PositiveIntegerField(default=0),
        ),
    ]
|
||||
api/tacticalrmm/checks/migrations/0025_auto_20210917_1954.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Generated by Django 3.2.6 on 2021-09-17 19:54

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("checks", "0024_auto_20210606_1632"),
    ]

    operations = [
        migrations.AlterField(
            model_name="check",
            name="created_by",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name="check",
            name="modified_by",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
|
||||
@@ -1,23 +1,14 @@
import asyncio
import json
import os
import string
from statistics import mean
from typing import Any

import pytz
from alerts.models import SEVERITY_CHOICES
from core.models import CoreSettings
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from logs.models import BaseAuditModel
from loguru import logger

from .utils import bytes2human

logger.configure(**settings.LOG_CONFIG)
from tacticalrmm.models import PermissionQuerySet

CHECK_TYPE_CHOICES = [
("diskspace", "Disk Space Check"),
@@ -56,6 +47,7 @@ EVT_LOG_FAIL_WHEN_CHOICES = [

class Check(BaseAuditModel):
objects = PermissionQuerySet.as_manager()

# common fields

@@ -236,16 +228,16 @@ class Check(BaseAuditModel):

return self.last_run

@property
def non_editable_fields(self) -> list[str]:
@staticmethod
def non_editable_fields() -> list[str]:
return [
"check_type",
"status",
"more_info",
"last_run",
"fail_count",
"outage_history",
"extra_details",
"status",
"stdout",
"stderr",
"retcode",
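A small call-site note on the hunk above: non_editable_fields changes from a property to a static method, so code that used to read it as an attribute now has to call it. A minimal sketch, assuming check is a Check instance:

# before the change: property access on an instance
fields = check.non_editable_fields

# after the change: static method, callable on the class without an instance
fields = Check.non_editable_fields()
if "status" in fields:
    pass  # e.g. strip non-editable keys from an incoming payload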
@@ -315,9 +307,9 @@ class Check(BaseAuditModel):
)

def add_check_history(self, value: int, more_info: Any = None) -> None:
CheckHistory.objects.create(check_history=self, y=value, results=more_info)
CheckHistory.objects.create(check_id=self.pk, y=value, results=more_info)

def handle_checkv2(self, data):
def handle_check(self, data):
from alerts.models import Alert

# cpuload or mem checks
@@ -348,9 +340,6 @@ class Check(BaseAuditModel):
elif self.check_type == "diskspace":
if data["exists"]:
percent_used = round(data["percent_used"])
total = bytes2human(data["total"])
free = bytes2human(data["free"])

if self.error_threshold and (100 - percent_used) < self.error_threshold:
self.status = "failing"
self.alert_severity = "error"
@@ -364,7 +353,7 @@ class Check(BaseAuditModel):
else:
self.status = "passing"

self.more_info = f"Total: {total}B, Free: {free}B"
self.more_info = data["more_info"]

# add check history
self.add_check_history(100 - percent_used)
@@ -380,12 +369,7 @@ class Check(BaseAuditModel):
self.stdout = data["stdout"]
self.stderr = data["stderr"]
self.retcode = data["retcode"]
try:
# python agent
self.execution_time = "{:.4f}".format(data["stop"] - data["start"])
except:
# golang agent
self.execution_time = "{:.4f}".format(data["runtime"])
self.execution_time = "{:.4f}".format(data["runtime"])

if data["retcode"] in self.info_return_codes:
self.alert_severity = "info"
@@ -421,18 +405,8 @@ class Check(BaseAuditModel):
# ping checks
elif self.check_type == "ping":
success = ["Reply", "bytes", "time", "TTL"]
output = data["output"]

if data["has_stdout"]:
if all(x in output for x in success):
self.status = "passing"
else:
self.status = "failing"
elif data["has_stderr"]:
self.status = "failing"

self.more_info = output
self.status = data["status"]
self.more_info = data["output"]
self.save(update_fields=["more_info"])

self.add_check_history(
@@ -441,41 +415,8 @@ class Check(BaseAuditModel):

# windows service checks
elif self.check_type == "winsvc":
svc_stat = data["status"]
self.more_info = f"Status {svc_stat.upper()}"

if data["exists"]:
if svc_stat == "running":
self.status = "passing"
elif svc_stat == "start_pending" and self.pass_if_start_pending:
self.status = "passing"
else:
if self.agent and self.restart_if_stopped:
nats_data = {
"func": "winsvcaction",
"payload": {"name": self.svc_name, "action": "start"},
}
r = asyncio.run(self.agent.nats_cmd(nats_data, timeout=32))
if r == "timeout" or r == "natsdown":
self.status = "failing"
elif not r["success"] and r["errormsg"]:
self.status = "failing"
elif r["success"]:
self.status = "passing"
self.more_info = f"Status RUNNING"
else:
self.status = "failing"
else:
self.status = "failing"

else:
if self.pass_if_svc_not_exist:
self.status = "passing"
else:
self.status = "failing"

self.more_info = f"Service {self.svc_name} does not exist"

self.status = data["status"]
self.more_info = data["more_info"]
self.save(update_fields=["more_info"])

self.add_check_history(
@@ -483,49 +424,7 @@ class Check(BaseAuditModel):
)

elif self.check_type == "eventlog":
log = []
is_wildcard = self.event_id_is_wildcard
eventType = self.event_type
eventID = self.event_id
source = self.event_source
message = self.event_message
r = data["log"]

for i in r:
if i["eventType"] == eventType:
if not is_wildcard and not int(i["eventID"]) == eventID:
continue

if not source and not message:
if is_wildcard:
log.append(i)
elif int(i["eventID"]) == eventID:
log.append(i)
continue

if source and message:
if is_wildcard:
if source in i["source"] and message in i["message"]:
log.append(i)

elif int(i["eventID"]) == eventID:
if source in i["source"] and message in i["message"]:
log.append(i)

continue

if source and source in i["source"]:
if is_wildcard:
log.append(i)
elif int(i["eventID"]) == eventID:
log.append(i)

if message and message in i["message"]:
if is_wildcard:
log.append(i)
elif int(i["eventID"]) == eventID:
log.append(i)

log = data["log"]
if self.fail_when == "contains":
if log and len(log) >= self.number_of_events_b4_alert:
self.status = "failing"
@@ -556,33 +455,23 @@ class Check(BaseAuditModel):
elif self.status == "passing":
self.fail_count = 0
self.save(update_fields=["status", "fail_count", "alert_severity"])
self.save()
if Alert.objects.filter(assigned_check=self, resolved=False).exists():
Alert.handle_alert_resolve(self)

return self.status

def handle_assigned_task(self) -> None:
for task in self.assignedtask.all():  # type: ignore
if task.enabled:
task.run_win_task()

@staticmethod
def serialize(check):
# serializes the check and returns json
from .serializers import CheckSerializer
from .serializers import CheckAuditSerializer

return CheckSerializer(check).data

# for policy diskchecks
@staticmethod
def all_disks():
return [f"{i}:" for i in string.ascii_uppercase]

# for policy service checks
@staticmethod
def load_default_services():
with open(
os.path.join(settings.BASE_DIR, "services/default_services.json")
) as f:
default_services = json.load(f)

return default_services
return CheckAuditSerializer(check).data

def create_policy_check(self, agent=None, policy=None):

@@ -598,6 +487,14 @@ class Check(BaseAuditModel):
script=self.script,
)

for task in self.assignedtask.all():  # type: ignore
if policy or (
agent and not agent.autotasks.filter(parent_task=task.pk).exists()
):
task.create_policy_task(
agent=agent, policy=policy, assigned_check=check
)

for field in self.policy_fields_to_copy:
setattr(check, field, getattr(self, field))
@@ -770,14 +667,12 @@ class Check(BaseAuditModel):
class CheckHistory(models.Model):
check_history = models.ForeignKey(
Check,
related_name="check_history",
on_delete=models.CASCADE,
)
objects = PermissionQuerySet.as_manager()

check_id = models.PositiveIntegerField(default=0)
x = models.DateTimeField(auto_now_add=True)
y = models.PositiveIntegerField(null=True, blank=True, default=None)
results = models.JSONField(null=True, blank=True)

def __str__(self):
return self.check_history.readable_desc
return str(self.x)
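With the foreign key replaced by the bare check_id integer (and add_check_history writing check_id=self.pk), a check's history is now fetched by filtering on that column rather than following a reverse relation. A minimal sketch, assuming the models above and a hypothetical pk:

from checks.models import Check, CheckHistory

check = Check.objects.get(pk=123)  # hypothetical pk

# old style, via the FK reverse relation: check.check_history.all()
# new style, via the integer column written by add_check_history():
history = CheckHistory.objects.filter(check_id=check.pk).order_by("-x")[:30]
for point in history:
    print(point.x, point.y, point.results)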
23  api/tacticalrmm/checks/permissions.py  Normal file
@@ -0,0 +1,23 @@
from rest_framework import permissions

from tacticalrmm.permissions import _has_perm, _has_perm_on_agent


class ChecksPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        if r.method == "GET" or r.method == "PATCH":
            if "agent_id" in view.kwargs.keys():
                return _has_perm(r, "can_list_checks") and _has_perm_on_agent(
                    r.user, view.kwargs["agent_id"]
                )
            else:
                return _has_perm(r, "can_list_checks")
        else:
            return _has_perm(r, "can_manage_checks")


class RunChecksPerms(permissions.BasePermission):
    def has_permission(self, r, view):
        return _has_perm(r, "can_run_checks") and _has_perm_on_agent(
            r.user, view.kwargs["agent_id"]
        )
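These permission classes are wired up the same way RunAutoTaskPerms is in the autotasks view earlier in this diff: listed in a view's permission_classes next to IsAuthenticated. A sketch with a made-up view name (the real checks views are not shown in this excerpt):

from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from checks.permissions import ChecksPerms


class ExampleChecksView(APIView):
    # ChecksPerms: GET/PATCH require can_list_checks (plus agent scoping when
    # agent_id is in the URL kwargs); everything else requires can_manage_checks.
    permission_classes = [IsAuthenticated, ChecksPerms]

    def get(self, request, agent_id=None):
        return Response("ok")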
@@ -3,9 +3,10 @@ import validators as _v
from rest_framework import serializers

from autotasks.models import AutomatedTask
from scripts.serializers import ScriptCheckSerializer, ScriptSerializer
from scripts.serializers import ScriptCheckSerializer

from .models import Check, CheckHistory
from scripts.models import Script


class AssignedTaskField(serializers.ModelSerializer):
@@ -17,7 +18,6 @@ class AssignedTaskField(serializers.ModelSerializer):
class CheckSerializer(serializers.ModelSerializer):

readable_desc = serializers.ReadOnlyField()
script = ScriptSerializer(read_only=True)
assigned_task = serializers.SerializerMethodField()
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
history_info = serializers.ReadOnlyField()
@@ -56,6 +56,11 @@ class CheckSerializer(serializers.ModelSerializer):
def validate(self, val):
try:
check_type = val["check_type"]
filter = (
{"agent": val["agent"]}
if "agent" in val.keys()
else {"policy": val["policy"]}
)
except KeyError:
return val
@@ -64,7 +69,7 @@ class CheckSerializer(serializers.ModelSerializer):
if check_type == "diskspace":
if not self.instance:  # only on create
checks = (
Check.objects.filter(**self.context)
Check.objects.filter(**filter)
.filter(check_type="diskspace")
.exclude(managed_by_policy=True)
)
@@ -101,7 +106,7 @@ class CheckSerializer(serializers.ModelSerializer):

if check_type == "cpuload" and not self.instance:
if (
Check.objects.filter(**self.context, check_type="cpuload")
Check.objects.filter(**filter, check_type="cpuload")
.exclude(managed_by_policy=True)
.exists()
):
@@ -125,7 +130,7 @@ class CheckSerializer(serializers.ModelSerializer):

if check_type == "memory" and not self.instance:
if (
Check.objects.filter(**self.context, check_type="memory")
Check.objects.filter(**filter, check_type="memory")
.exclude(managed_by_policy=True)
.exists()
):
@@ -158,13 +163,16 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):
class CheckRunnerGetSerializer(serializers.ModelSerializer):
# only send data needed for agent to run a check
assigned_tasks = serializers.SerializerMethodField()
script = ScriptCheckSerializer(read_only=True)
script_args = serializers.SerializerMethodField()

def get_assigned_tasks(self, obj):
if obj.assignedtask.exists():
tasks = obj.assignedtask.all()
return AssignedTaskCheckRunnerField(tasks, many=True).data
def get_script_args(self, obj):
if obj.check_type != "script":
return []

return Script.parse_script_args(
agent=obj.agent, shell=obj.script.shell, args=obj.script_args
)

class Meta:
model = Check
@@ -193,6 +201,7 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer):
"modified_by",
"modified_time",
"history",
"dashboard_alert",
]

@@ -215,3 +224,9 @@ class CheckHistorySerializer(serializers.ModelSerializer):
class Meta:
model = CheckHistory
fields = ("x", "y", "results")


class CheckAuditSerializer(serializers.ModelSerializer):
class Meta:
model = Check
fields = "__all__"
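One detail in the serializer changes above: validate() now derives a filter dict from whichever of agent or policy is present in the incoming data and unpacks it into the queryset instead of reading self.context, so the duplicate-check guard behaves the same for agent checks and policy checks. A reduced sketch of that pattern; the helper name and error message are illustrative, not from this changeset:

from rest_framework import serializers

from checks.models import Check


def _validate_single_cpuload(val: dict) -> dict:
    # Mirrors the pattern used in CheckSerializer.validate().
    filter_kwargs = (
        {"agent": val["agent"]} if "agent" in val else {"policy": val["policy"]}
    )
    if (
        Check.objects.filter(**filter_kwargs, check_type="cpuload")
        .exclude(managed_by_policy=True)
        .exists()
    ):
        raise serializers.ValidationError("A cpuload check already exists for this target")
    return val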
File diff suppressed because it is too large
@@ -3,10 +3,9 @@ from django.urls import path
from . import views

urlpatterns = [
path("checks/", views.AddCheck.as_view()),
path("<int:pk>/check/", views.GetUpdateDeleteCheck.as_view()),
path("<pk>/loadchecks/", views.load_checks),
path("getalldisks/", views.get_disks_for_policies),
path("runchecks/<pk>/", views.run_checks),
path("history/<int:checkpk>/", views.CheckHistory.as_view()),
path("", views.GetAddChecks.as_view()),
path("<int:pk>/", views.GetUpdateDeleteCheck.as_view()),
path("<int:pk>/reset/", views.ResetCheck.as_view()),
path("<agent:agent_id>/run/", views.run_checks),
path("<int:pk>/history/", views.GetCheckHistory.as_view()),
]
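For orientation, the rewritten urlpatterns above drop the verb-style routes (checks/, runchecks/, history/, getalldisks/, loadchecks/) in favour of resource-style routes keyed by pk or agent_id; <agent:agent_id> relies on a custom path converter registered elsewhere in the project. Assuming the app is included under a /checks/ prefix (an assumption, the project urls are not shown here), the wiring looks roughly like:

# Hypothetical project-level urls.py; only the include prefix is assumed.
from django.urls import include, path

urlpatterns = [
    path("checks/", include("checks.urls")),
]

# Resulting routes (per the urlpatterns above):
#   /checks/                    -> GetAddChecks (list / add)
#   /checks/<pk>/               -> GetUpdateDeleteCheck
#   /checks/<pk>/reset/         -> ResetCheck
#   /checks/<agent_id>/run/     -> run_checks
#   /checks/<pk>/history/       -> GetCheckHistory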
Some files were not shown because too many files have changed in this diff