Compare commits
964 Commits

@@ -23,7 +23,7 @@ POSTGRES_USER=postgres
 POSTGRES_PASS=postgrespass
 
 # DEV SETTINGS
-APP_PORT=80
+APP_PORT=443
 API_PORT=80
 HTTP_PROTOCOL=https
 DOCKER_NETWORK=172.21.0.0/24

@@ -1,4 +1,11 @@
-FROM python:3.9.6-slim
+# pulls community scripts from git repo
+FROM python:3.10-slim AS GET_SCRIPTS_STAGE
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends git && \
+    git clone https://github.com/amidaware/community-scripts.git /community-scripts
+
+FROM python:3.10-slim
 
 ENV TACTICAL_DIR /opt/tactical
 ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
@@ -10,12 +17,14 @@ ENV PYTHONUNBUFFERED=1
 
 EXPOSE 8000 8383 8005
 
+RUN apt-get update && \
+    apt-get install -y build-essential
+
 RUN groupadd -g 1000 tactical && \
     useradd -u 1000 -g 1000 tactical
 
-# Copy nats-api file
-COPY natsapi/bin/nats-api /usr/local/bin/
-RUN chmod +x /usr/local/bin/nats-api
+# copy community scripts
+COPY --from=GET_SCRIPTS_STAGE /community-scripts /community-scripts
 
 # Copy dev python reqs
 COPY .devcontainer/requirements.txt /
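
The dev API image is now a multi-stage build: a throwaway GET_SCRIPTS_STAGE stage clones the community-scripts repository, and only the cloned directory is copied into the final python:3.10-slim image, so git never lands in the runtime layer. A quick local check might look like the sketch below; the api-dev tag and the repository-root build context are taken from the dev compose file, and the --entrypoint override is an assumption about how to inspect the image.

```bash
# Build the dev API image from the repository root (tag/context assumed from docker-compose.yml)
docker build -f .devcontainer/api.dockerfile -t api-dev .

# Confirm the community scripts were copied out of the GET_SCRIPTS_STAGE stage
docker run --rm --entrypoint ls api-dev /community-scripts | head
```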

@@ -1,19 +0,0 @@
-version: '3.4'
-
-services:
-  api-dev:
-    image: api-dev
-    build:
-      context: .
-      dockerfile: ./api.dockerfile
-    command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
-    ports:
-      - 8000:8000
-      - 5678:5678
-    volumes:
-      - tactical-data-dev:/opt/tactical
-      - ..:/workspace:cached
-    networks:
-      dev:
-        aliases:
-          - tactical-backend

@@ -5,10 +5,11 @@ services:
     container_name: trmm-api-dev
     image: api-dev
     restart: always
+    user: 1000:1000
     build:
       context: ..
       dockerfile: .devcontainer/api.dockerfile
-    command: ["tactical-api"]
+    command: [ "tactical-api" ]
     environment:
       API_PORT: ${API_PORT}
     ports:
@@ -23,7 +24,7 @@ services:
 
   app-dev:
     container_name: trmm-app-dev
-    image: node:14-alpine
+    image: node:16-alpine
     restart: always
     command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
     working_dir: /workspace/web
@@ -41,6 +42,7 @@ services:
     container_name: trmm-nats-dev
     image: ${IMAGE_REPO}tactical-nats:${VERSION}
     restart: always
+    user: 1000:1000
     environment:
       API_HOST: ${API_HOST}
       API_PORT: ${API_PORT}
@@ -61,6 +63,7 @@ services:
     container_name: trmm-meshcentral-dev
     image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
     restart: always
+    user: 1000:1000
     environment:
       MESH_HOST: ${MESH_HOST}
       MESH_USER: ${MESH_USER}
@@ -84,6 +87,7 @@ services:
     container_name: trmm-mongodb-dev
     image: mongo:4.4
     restart: always
+    user: 1000:1000
     environment:
       MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
       MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
@@ -101,7 +105,7 @@ services:
     image: postgres:13-alpine
     restart: always
     environment:
-      POSTGRES_DB: tacticalrmm
+      POSTGRES_DB: ${POSTGRES_DB}
       POSTGRES_USER: ${POSTGRES_USER}
       POSTGRES_PASSWORD: ${POSTGRES_PASS}
     volumes:
@@ -115,7 +119,8 @@ services:
   redis-dev:
     container_name: trmm-redis-dev
     restart: always
-    command: redis-server --appendonly yes
+    user: 1000:1000
+    command: redis-server
     image: redis:6.0-alpine
     volumes:
       - redis-data-dev:/data
@@ -128,7 +133,7 @@ services:
     container_name: trmm-init-dev
     image: api-dev
     restart: on-failure
-    command: ["tactical-init-dev"]
+    command: [ "tactical-init-dev" ]
     environment:
       POSTGRES_USER: ${POSTGRES_USER}
       POSTGRES_PASS: ${POSTGRES_PASS}
@@ -140,6 +145,7 @@ services:
       TRMM_PASS: ${TRMM_PASS}
       HTTP_PROTOCOL: ${HTTP_PROTOCOL}
       APP_PORT: ${APP_PORT}
+      POSTGRES_DB: ${POSTGRES_DB}
     depends_on:
       - postgres-dev
       - meshcentral-dev
@@ -147,14 +153,18 @@ services:
       - dev
     volumes:
       - tactical-data-dev:/opt/tactical
+      - mesh-data-dev:/meshcentral-data
+      - redis-data-dev:/redis/data
+      - mongo-dev-data:/mongo/data/db
       - ..:/workspace:cached
 
   # container for celery worker service
   celery-dev:
     container_name: trmm-celery-dev
     image: api-dev
-    command: ["tactical-celery-dev"]
+    command: [ "tactical-celery-dev" ]
     restart: always
+    user: 1000:1000
     networks:
       - dev
     volumes:
@@ -168,8 +178,9 @@ services:
   celerybeat-dev:
     container_name: trmm-celerybeat-dev
     image: api-dev
-    command: ["tactical-celerybeat-dev"]
+    command: [ "tactical-celerybeat-dev" ]
     restart: always
+    user: 1000:1000
     networks:
       - dev
     volumes:
@@ -183,8 +194,9 @@ services:
   websockets-dev:
     container_name: trmm-websockets-dev
     image: api-dev
-    command: ["tactical-websockets-dev"]
+    command: [ "tactical-websockets-dev" ]
     restart: always
+    user: 1000:1000
     networks:
       dev:
         aliases:
@@ -201,6 +213,7 @@ services:
     container_name: trmm-nginx-dev
     image: ${IMAGE_REPO}tactical-nginx:${VERSION}
     restart: always
+    user: 1000:1000
     environment:
       APP_HOST: ${APP_HOST}
       API_HOST: ${API_HOST}
@@ -209,33 +222,22 @@ services:
       CERT_PRIV_KEY: ${CERT_PRIV_KEY}
       APP_PORT: ${APP_PORT}
       API_PORT: ${API_PORT}
+      DEV: 1
     networks:
       dev:
         ipv4_address: ${DOCKER_NGINX_IP}
     ports:
-      - "80:80"
-      - "443:443"
+      - "80:8080"
+      - "443:4443"
     volumes:
       - tactical-data-dev:/opt/tactical
 
-  mkdocs-dev:
-    container_name: trmm-mkdocs-dev
-    image: api-dev
-    restart: always
-    command: ["tactical-mkdocs-dev"]
-    ports:
-      - "8005:8005"
-    volumes:
-      - ..:/workspace:cached
-    networks:
-      - dev
-
 volumes:
-  tactical-data-dev:
-  postgres-data-dev:
-  mongo-dev-data:
-  mesh-data-dev:
-  redis-data-dev:
+  tactical-data-dev: null
+  postgres-data-dev: null
+  mongo-dev-data: null
+  mesh-data-dev: null
+  redis-data-dev: null
 
 networks:
   dev:
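
Most services in the dev compose file now run as the unprivileged uid/gid 1000 (matching the tactical user created in the Dockerfile) instead of root. One way to verify this after bringing the stack up is sketched below; the -p trmm project name is borrowed from the VS Code task removed further down and is only illustrative, and the older docker-compose binary works the same way.

```bash
# Bring up the dev stack
docker compose -f .devcontainer/docker-compose.yml -p trmm up -d --build

# The API container should report the unprivileged user introduced by this change
docker compose -f .devcontainer/docker-compose.yml -p trmm exec api-dev id
# expected output starts with: uid=1000(tactical) gid=1000(tactical)
```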

@@ -9,7 +9,8 @@ set -e
 : "${POSTGRES_USER:=tactical}"
 : "${POSTGRES_PASS:=tactical}"
 : "${POSTGRES_DB:=tacticalrmm}"
-: "${MESH_CONTAINER:=tactical-meshcentral}"
+: "${MESH_SERVICE:=tactical-meshcentral}"
+: "${MESH_WS_URL:=ws://${MESH_SERVICE}:4443}"
 : "${MESH_USER:=meshcentral}"
 : "${MESH_PASS:=meshcentralpass}"
 : "${MESH_HOST:=tactical-meshcentral}"
@@ -20,6 +21,9 @@ set -e
 : "${APP_PORT:=8080}"
 : "${API_PORT:=8000}"
 
+: "${CERT_PRIV_PATH:=${TACTICAL_DIR}/certs/privkey.pem}"
+: "${CERT_PUB_PATH:=${TACTICAL_DIR}/certs/fullchain.pem}"
+
 # Add python venv to path
 export PATH="${VIRTUAL_ENV}/bin:$PATH"
 
@@ -37,7 +41,7 @@ function django_setup {
     sleep 5
   done
 
-  until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
+  until (echo > /dev/tcp/"${MESH_SERVICE}"/4443) &> /dev/null; do
     echo "waiting for meshcentral container to be ready..."
     sleep 5
   done
@@ -56,10 +60,12 @@ DEBUG = True
 
 DOCKER_BUILD = True
 
-CERT_FILE = '/opt/tactical/certs/fullchain.pem'
-KEY_FILE = '/opt/tactical/certs/privkey.pem'
+SWAGGER_ENABLED = True
 
-SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
+CERT_FILE = '${CERT_PUB_PATH}'
+KEY_FILE = '${CERT_PRIV_PATH}'
+
+SCRIPTS_DIR = '/community-scripts'
 
 ALLOWED_HOSTS = ['${API_HOST}', '*']
 
@@ -78,28 +84,11 @@ DATABASES = {
   }
 }
 
-REST_FRAMEWORK = {
-    'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
-
-    'DEFAULT_PERMISSION_CLASSES': (
-        'rest_framework.permissions.IsAuthenticated',
-    ),
-    'DEFAULT_AUTHENTICATION_CLASSES': (
-        'knox.auth.TokenAuthentication',
-    ),
-}
-
-if not DEBUG:
-    REST_FRAMEWORK.update({
-        'DEFAULT_RENDERER_CLASSES': (
-            'rest_framework.renderers.JSONRenderer',
-        )
-    })
-
 MESH_USERNAME = '${MESH_USER}'
 MESH_SITE = 'https://${MESH_HOST}'
 MESH_TOKEN_KEY = '${MESH_TOKEN}'
 REDIS_HOST = '${REDIS_HOST}'
+MESH_WS_URL = '${MESH_WS_URL}'
 ADMIN_ENABLED = True
 EOF
 )"
@@ -107,6 +96,7 @@ EOF
   echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
 
   # run migrations and init scripts
+  "${VIRTUAL_ENV}"/bin/python manage.py pre_update_tasks
   "${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
   "${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
   "${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
@@ -114,7 +104,10 @@ EOF
   "${VIRTUAL_ENV}"/bin/python manage.py load_chocos
   "${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
   "${VIRTUAL_ENV}"/bin/python manage.py reload_nats
+  "${VIRTUAL_ENV}"/bin/python manage.py create_natsapi_conf
   "${VIRTUAL_ENV}"/bin/python manage.py create_installer_user
+  "${VIRTUAL_ENV}"/bin/python manage.py post_update_tasks
+
 
   # create super user
   echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
@@ -127,8 +120,24 @@ if [ "$1" = 'tactical-init-dev' ]; then
 
   test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
 
+  mkdir -p /meshcentral-data
+  mkdir -p ${TACTICAL_DIR}/tmp
+  mkdir -p ${TACTICAL_DIR}/certs
+  mkdir -p /mongo/data/db
+  mkdir -p /redis/data
+  touch /meshcentral-data/.initialized && chown -R 1000:1000 /meshcentral-data
+  touch ${TACTICAL_DIR}/tmp/.initialized && chown -R 1000:1000 ${TACTICAL_DIR}
+  touch ${TACTICAL_DIR}/certs/.initialized && chown -R 1000:1000 ${TACTICAL_DIR}/certs
+  touch /mongo/data/db/.initialized && chown -R 1000:1000 /mongo/data/db
+  touch /redis/data/.initialized && chown -R 1000:1000 /redis/data
+  mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
+  mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/log
+  touch ${TACTICAL_DIR}/api/tacticalrmm/private/log/django_debug.log
+
   # setup Python virtual env and install dependencies
   ! test -e "${VIRTUAL_ENV}" && python -m venv ${VIRTUAL_ENV}
+  "${VIRTUAL_ENV}"/bin/python -m pip install --upgrade pip
+  "${VIRTUAL_ENV}"/bin/pip install --no-cache-dir setuptools wheel
   "${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt
 
   django_setup
@@ -171,8 +180,3 @@ if [ "$1" = 'tactical-websockets-dev' ]; then
   check_tactical_ready
   "${VIRTUAL_ENV}"/bin/daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
 fi
-
-if [ "$1" = 'tactical-mkdocs-dev' ]; then
-  cd "${WORKSPACE_DIR}/docs"
-  "${VIRTUAL_ENV}"/bin/mkdocs serve
-fi
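
The entrypoint defaults rely on Bash's := parameter expansion, so MESH_WS_URL is derived from MESH_SERVICE unless the caller exports its own value. A minimal sketch of the pattern, with the variable names copied from the hunk above and the echoed value being only what the defaults produce:

```bash
#!/usr/bin/env bash
# ":" is a no-op command; "${VAR:=default}" assigns default only when VAR is unset or empty,
# so values exported by docker-compose (or the shell) always win over these fallbacks.
: "${MESH_SERVICE:=tactical-meshcentral}"
: "${MESH_WS_URL:=ws://${MESH_SERVICE}:4443}"

echo "${MESH_WS_URL}"   # ws://tactical-meshcentral:4443 unless overridden
```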

@@ -1,37 +1,41 @@
 # To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
-asyncio-nats-client
-celery
-channels
-channels_redis
-django-ipware
-Django
-django-cors-headers
-django-rest-knox
-djangorestframework
-loguru
-msgpack
-psycopg2-binary
-pycparser
-pycryptodome
-pyotp
-pyparsing
-pytz
-qrcode
-redis
-twilio
-packaging
-validators
-websockets
-black
-Werkzeug
-django-extensions
-coverage
-coveralls
-model_bakery
-mkdocs
-mkdocs-material
-pymdown-extensions
-Pygments
-mypy
-pysnooper
-isort
+asgiref==3.5.0
+celery==5.2.6
+channels==3.0.4
+channels_redis==3.4.0
+daphne==3.0.2
+Django==4.0.4
+django-cors-headers==3.11.0
+django-ipware==4.0.2
+django-rest-knox==4.2.0
+djangorestframework==3.13.1
+future==0.18.2
+msgpack==1.0.3
+nats-py==2.1.0
+packaging==21.3
+psycopg2-binary==2.9.3
+pycryptodome==3.14.1
+pyotp==2.6.0
+pytz==2022.1
+qrcode==7.3.1
+redis==4.2.2
+requests==2.27.1
+twilio==7.8.1
+urllib3==1.26.9
+validators==0.18.2
+websockets==10.2
+drf_spectacular==0.22.0
+meshctrl==0.1.15
+hiredis==2.0.0
+
+# dev
+black==22.3.0
+django-extensions==3.1.5
+isort==5.10.1
+mypy==0.942
+types-pytz==2021.3.6
+model-bakery==1.5.0
+coverage==6.3.2
+django-silk==4.3.0
+django-stubs==1.10.1
+djangorestframework-stubs==1.5.0
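
Every dev dependency is now pinned to an exact version. Per the header comment in the file, the pins can be refreshed by installing into a clean virtual environment and freezing it again; a sketch, where the scratch venv path is arbitrary:

```bash
# Create a throwaway environment, install the current pins, then rewrite the file
python3.10 -m venv /tmp/reqs-env
source /tmp/reqs-env/bin/activate
pip install --upgrade pip
pip install -r .devcontainer/requirements.txt
pip freeze > .devcontainer/requirements.txt
```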

.github/workflows/ci-tests.yml (new file, 66 lines)
@@ -0,0 +1,66 @@
+name: Tests CI
+
+on:
+  push:
+    branches:
+      - "*"
+  pull_request:
+    branches:
+      - "*"
+
+jobs:
+  test:
+    runs-on: self-hosted
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Setup virtual env and install requirements
+        run: |
+          sudo -u postgres psql -c 'DROP DATABASE IF EXISTS pipeline'
+          sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test_pipeline'
+          sudo -u postgres psql -c 'CREATE DATABASE pipeline'
+          sudo -u postgres psql -c "SET client_encoding = 'UTF8'" pipeline
+          pwd
+          rm -rf /actions-runner/_work/trmm-actions/trmm-actions/api/env
+          cd api
+          python3.10 -m venv env
+          source env/bin/activate
+          cd tacticalrmm
+          python --version
+          SETTINGS_FILE="tacticalrmm/settings.py"
+          SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
+          WHEEL_VER=$(grep "^WHEEL_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
+          pip install --upgrade pip
+          pip install setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
+          pip install -r requirements.txt -r requirements-test.txt
+
+      - name: Run django tests
+        env:
+          GHACTIONS: "yes"
+        run: |
+          cd api/tacticalrmm
+          source ../env/bin/activate
+          rm -f .coverage coverage.lcov
+          coverage run --concurrency=multiprocessing manage.py test -v 2 --parallel
+          coverage combine
+          coverage lcov
+          if [ $? -ne 0 ]; then
+            exit 1
+          fi
+
+      - name: Codestyle black
+        run: |
+          cd api
+          source env/bin/activate
+          black --exclude migrations/ --check tacticalrmm
+          if [ $? -ne 0 ]; then
+            exit 1
+          fi
+
+      - name: Coveralls
+        uses: coverallsapp/github-action@master
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          path-to-lcov: ./api/tacticalrmm/coverage.lcov
+          base-path: ./api/tacticalrmm
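
The new CI workflow runs the Django test suite under coverage on a self-hosted runner and uploads the lcov report to Coveralls. The test step can be reproduced locally with the same commands, as sketched below; this assumes the api/env virtual environment from the setup step already exists and that a Postgres instance is reachable with the project's test settings.

```bash
cd api/tacticalrmm
source ../env/bin/activate
rm -f .coverage coverage.lcov
coverage run --concurrency=multiprocessing manage.py test -v 2 --parallel
coverage combine
coverage lcov          # lcov output requires coverage >= 6.3, matching the pinned 6.3.2
```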

.github/workflows/codeql-analysis.yml (new file, 70 lines)
@@ -0,0 +1,70 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [ develop ]
+  schedule:
+    - cron: '19 14 * * 6'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'go', 'javascript', 'python' ]
+        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+        # Learn more about CodeQL language support at https://git.io/codeql-language-support
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v2
+
+    # Initializes the CodeQL tools for scanning.
+    - name: Initialize CodeQL
+      uses: github/codeql-action/init@v1
+      with:
+        languages: ${{ matrix.language }}
+        # If you wish to specify custom queries, you can do so here or in a config file.
+        # By default, queries listed here will override any specified in a config file.
+        # Prefix the list here with "+" to use these queries and those in the config file.
+        # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+    # If this step fails, then you should remove it and run the build manually (see below)
+    - name: Autobuild
+      uses: github/codeql-action/autobuild@v1
+
+    # ℹ️ Command-line programs to run using the OS shell.
+    # 📚 https://git.io/JvXDl
+
+    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+    #    and modify them (or add more) to build your code if your project
+    #    uses a compiled language
+
+    #- run: |
+    #   make bootstrap
+    #   make release
+
+    - name: Perform CodeQL Analysis
+      uses: github/codeql-action/analyze@v1

.github/workflows/deploy-docs.yml (deleted, 22 lines)
@@ -1,22 +0,0 @@
-name: Deploy Docs
-on:
-  push:
-    branches:
-      - master
-
-defaults:
-  run:
-    working-directory: docs
-
-jobs:
-  deploy:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-        with:
-          python-version: 3.x
-      - run: pip install --upgrade pip
-      - run: pip install --upgrade setuptools wheel
-      - run: pip install mkdocs mkdocs-material pymdown-extensions
-      - run: mkdocs gh-deploy --force

.github/workflows/devskim-analysis.yml (new file, 34 lines)
@@ -0,0 +1,34 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+name: DevSkim
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    branches: [ develop ]
+  schedule:
+    - cron: '19 5 * * 0'
+
+jobs:
+  lint:
+    name: DevSkim
+    runs-on: ubuntu-20.04
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Run DevSkim scanner
+        uses: microsoft/DevSkim-Action@v1
+
+      - name: Upload DevSkim scan results to GitHub Security tab
+        uses: github/codeql-action/upload-sarif@v1
+        with:
+          sarif_file: devskim-results.sarif

.gitignore (5 lines changed)
@@ -48,3 +48,8 @@ nats-rmm.conf
 .mypy_cache
 docs/site/
 reset_db.sh
+run_go_cmd.py
+nats-api.conf
+ignore/
+coverage.lcov
+daphne.sock.lock

.vscode/settings.json (24 lines changed)
@@ -1,5 +1,5 @@
 {
-  "python.pythonPath": "api/tacticalrmm/env/bin/python",
+  "python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python",
   "python.languageServer": "Pylance",
   "python.analysis.extraPaths": [
     "api/tacticalrmm",
@@ -8,10 +8,18 @@
   "python.analysis.diagnosticSeverityOverrides": {
     "reportUnusedImport": "error",
     "reportDuplicateImport": "error",
+    "reportGeneralTypeIssues": "none"
   },
-  "python.analysis.memory.keepLibraryAst": true,
-  "python.linting.mypyEnabled": true,
   "python.analysis.typeCheckingMode": "basic",
+  "mypy.runUsingActiveInterpreter": true,
+  "python.linting.enabled": true,
+  "python.linting.mypyEnabled": true,
+  "python.linting.mypyArgs": [
+    "--ignore-missing-imports",
+    "--follow-imports=silent",
+    "--show-column-numbers",
+    "--strict"
+  ],
   "python.formatting.provider": "black",
   "editor.formatOnSave": true,
   "vetur.format.defaultFormatter.js": "prettier",
@@ -66,5 +74,13 @@
     "usePlaceholders": true,
     "completeUnimported": true,
     "staticcheck": true,
-  }
+  },
+  "mypy.targets": [
+    "api/tacticalrmm"
+  ],
+  "python.linting.ignorePatterns": [
+    "**/site-packages/**/*.py",
+    ".vscode/*.py",
+    "**env/**"
+  ]
 }
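
The editor settings now drive mypy through the Python extension with explicit arguments and a target of api/tacticalrmm. The equivalent command-line run is sketched below; it assumes the dev virtualenv sits at the interpreter path named in the settings and is run from the repository root.

```bash
# Activate the dev venv referenced by python.defaultInterpreterPath, then lint the same target
source api/tacticalrmm/env/bin/activate
mypy --ignore-missing-imports --follow-imports=silent --show-column-numbers --strict api/tacticalrmm
```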

.vscode/tasks.json (deleted, 23 lines)
@@ -1,23 +0,0 @@
-{
-  // See https://go.microsoft.com/fwlink/?LinkId=733558
-  // for the documentation about the tasks.json format
-  "version": "2.0.0",
-  "tasks": [
-    {
-      "label": "docker debug",
-      "type": "shell",
-      "command": "docker-compose",
-      "args": [
-        "-p",
-        "trmm",
-        "-f",
-        ".devcontainer/docker-compose.yml",
-        "-f",
-        ".devcontainer/docker-compose.debug.yml",
-        "up",
-        "-d",
-        "--build"
-      ]
-    }
-  ]
-}

LICENSE (deleted, 21 lines)
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2019-present wh1te909
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.

LICENSE.md (new file, 74 lines)
@@ -0,0 +1,74 @@
+### Tactical RMM License Version 1.0
+
+Text of license:   Copyright © 2022 AmidaWare LLC. All rights reserved.<br>
+          Amending the text of this license is not permitted.
+
+Trade Mark:    "Tactical RMM" is a trade mark of AmidaWare LLC.
+
+Licensor:      AmidaWare LLC of 1968 S Coast Hwy PMB 3847 Laguna Beach, CA, USA.
+
+Licensed Software:  The software known as Tactical RMM Version v0.12.0 (and all subsequent releases and versions) and the Tactical RMM Agent v2.0.0 (and all subsequent releases and versions).
+
+### 1. Preamble
+The Licensed Software is designed to facilitate the remote monitoring and management (RMM) of networks, systems, servers, computers and other devices. The Licensed Software is made available primarily for use by organisations and managed service providers for monitoring and management purposes.
+
+The Tactical RMM License is not an open-source software license. This license contains certain restrictions on the use of the Licensed Software. For example the functionality of the Licensed Software may not be made available as part of a SaaS (Software-as-a-Service) service or product to provide a commercial or for-profit service without the express prior permission of the Licensor.
+
+### 2. License Grant
+Permission is hereby granted, free of charge, on a non-exclusive basis, to copy, modify, create derivative works and use the Licensed Software in source and binary forms subject to the following terms and conditions. No additional rights will be implied under this license.
+
+* The hosting and use of the Licensed Software to monitor and manage in-house networks/systems and/or customer networks/systems is permitted.
+
+This license does not allow the functionality of the Licensed Software (whether in whole or in part) or a modified version of the Licensed Software or a derivative work to be used or otherwise made available as part of any other commercial or for-profit service, including, without limitation, any of the following:
+* a service allowing third parties to interact remotely through a computer network;
+* as part of a SaaS service or product;
+* as part of the provision of a managed hosting service or product;
+* the offering of installation and/or configuration services;
+* the offer for sale, distribution or sale of any service or product (whether or not branded as Tactical RMM).
+
+The prior written approval of AmidaWare LLC must be obtained for all commercial use and/or for-profit service use of the (i) Licensed Software (whether in whole or in part), (ii) a modified version of the Licensed Software and/or (iii) a derivative work.
+
+The terms of this license apply to all copies of the Licensed Software (including modified versions) and derivative works.
+
+All use of the Licensed Software must immediately cease if use breaches the terms of this license.
+
+### 3. Derivative Works
+If a derivative work is created which is based on or otherwise incorporates all or any part of the Licensed Software, and the derivative work is made available to any other person, the complete corresponding machine readable source code (including all changes made to the Licensed Software) must accompany the derivative work and be made publicly available online.
+
+### 4. Copyright Notice
+The following copyright notice shall be included in all copies of the Licensed Software:
+
+   Copyright © 2022 AmidaWare LLC.
+
+   Licensed under the Tactical RMM License Version 1.0 (the “License”).<br>
+   You may only use the Licensed Software in accordance with the License.<br>
+   A copy of the License is available at: https://license.tacticalrmm.com
+
+### 5. Disclaimer of Warranty
+THE LICENSED SOFTWARE IS PROVIDED "AS IS". TO THE FULLEST EXTENT PERMISSIBLE AT LAW ALL CONDITIONS, WARRANTIES OR OTHER TERMS OF ANY KIND WHICH MIGHT HAVE EFFECT OR BE IMPLIED OR INCORPORATED, WHETHER BY STATUTE, COMMON LAW OR OTHERWISE ARE HEREBY EXCLUDED, INCLUDING THE CONDITIONS, WARRANTIES OR OTHER TERMS AS TO SATISFACTORY QUALITY AND/OR MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, THE USE OF REASONABLE SKILL AND CARE AND NON-INFRINGEMENT.
+
+### 6. Limits of Liability
+THE FOLLOWING EXCLUSIONS SHALL APPLY TO THE FULLEST EXTENT PERMISSIBLE AT LAW. NEITHER THE AUTHORS NOR THE COPYRIGHT HOLDERS SHALL IN ANY CIRCUMSTANCES HAVE ANY LIABILITY FOR ANY CLAIM, LOSSES, DAMAGES OR OTHER LIABILITY, WHETHER THE SAME ARE SUFFERED DIRECTLY OR INDIRECTLY OR ARE IMMEDIATE OR CONSEQUENTIAL, AND WHETHER THE SAME ARISE IN CONTRACT, TORT OR DELICT (INCLUDING NEGLIGENCE) OR OTHERWISE HOWSOEVER ARISING FROM, OUT OF OR IN CONNECTION WITH THE LICENSED SOFTWARE OR THE USE OR INABILITY TO USE THE LICENSED SOFTWARE OR OTHER DEALINGS IN THE LICENSED SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH LOSS OR DAMAGE. THE FOREGOING EXCLUSIONS SHALL INCLUDE, WITHOUT LIMITATION, LIABILITY FOR ANY LOSSES OR DAMAGES WHICH FALL WITHIN ANY OF THE FOLLOWING CATEGORIES: SPECIAL, EXEMPLARY, OR INCIDENTAL LOSS OR DAMAGE, LOSS OF PROFITS, LOSS OF ANTICIPATED SAVINGS, LOSS OF BUSINESS OPPORTUNITY, LOSS OF GOODWILL, AND LOSS OR CORRUPTION OF DATA.
+
+### 7. Termination
+This license shall terminate with immediate effect if there is a material breach of any of its terms.
+
+### 8. No partnership, agency or joint venture
+Nothing in this license agreement is intended to, or shall be deemed to, establish any partnership or joint venture or any relationship of agency between AmidaWare LLC and any other person.
+
+### 9. No endorsement
+The names of the authors and/or the copyright holders must not be used to promote or endorse any products or services which are in any way derived from the Licensed Software without prior written consent.
+
+### 10. Trademarks
+No permission is granted to use the trademark “Tactical RMM” or any other trade name, trademark, service mark or product name of AmidaWare LLC except to the extent necessary to comply with the notice requirements in Section 4 (Copyright Notice).
+
+### 11. Entire agreement
+This license contains the whole agreement relating to its subject matter.
+
+
+
+### 12. Severance
+If any provision or part-provision of this license is or becomes invalid, illegal or unenforceable, it shall be deemed deleted, but that shall not affect the validity and enforceability of the rest of this license.
+
+### 13. Acceptance of these terms
+The terms and conditions of this license are accepted by copying, downloading, installing, redistributing, or otherwise using the Licensed Software.
### README.md (20 lines changed)

```diff
@@ -1,19 +1,18 @@
 # Tactical RMM

-[](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
+[badge image not captured in this rendering]
-[](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
+[](https://coveralls.io/github/amidaware/tacticalrmm?branch=develop)
-[](https://opensource.org/licenses/MIT)
 [](https://github.com/python/black)

-Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
+Tactical RMM is a remote monitoring & management tool, built with Django and Vue.\
-It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
+It uses an [agent](https://github.com/amidaware/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)

 # [LIVE DEMO](https://rmm.tacticalrmm.io/)
 Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.

 ### [Discord Chat](https://discord.gg/upGTkWp)

-### [Documentation](https://wh1te909.github.io/tacticalrmm/)
+### [Documentation](https://docs.tacticalrmm.com)

 ## Features

@@ -29,10 +28,13 @@ Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
 - Remote software installation via chocolatey
 - Software and hardware inventory

-## Windows versions supported
+## Windows agent versions supported

-- Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019
+- Windows 7, 8.1, 10, 11, Server 2008R2, 2012R2, 2016, 2019, 2022

+## Linux agent versions supported
+- Any distro with systemd

 ## Installation / Backup / Restore / Usage

-### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)
+### Refer to the [documentation](https://docs.tacticalrmm.com)
```
### SECURITY.md (new file, 12 lines)

```markdown
# Security Policy

## Supported Versions

| Version  | Supported          |
| -------- | ------------------ |
| 0.12.2   | :white_check_mark: |
| < 0.12.2 | :x:                |

## Reporting a Vulnerability

https://docs.tacticalrmm.com/security
```
### Coverage configuration (`omit` list)

```diff
@@ -21,4 +21,6 @@ omit =
     */tests.py
     */test.py
     checks/utils.py
+    */asgi.py
+    */demo_views.py
```
### Accounts admin module (import order)

```diff
@@ -1,7 +1,7 @@
 from django.contrib import admin
 from rest_framework.authtoken.admin import TokenAdmin

-from .models import User, Role
+from .models import Role, User

 admin.site.register(User)
 TokenAdmin.raw_id_fields = ("user",)
```
### Installer user management command

```diff
@@ -1,18 +1,22 @@
 import uuid

-from django.core.management.base import BaseCommand
 from accounts.models import User
+from django.core.management.base import BaseCommand


 class Command(BaseCommand):
     help = "Creates the installer user"

     def handle(self, *args, **kwargs):
+        self.stdout.write("Checking if installer user has been created...")
         if User.objects.filter(is_installer_user=True).exists():
+            self.stdout.write("Installer user already exists")
             return

         User.objects.create_user(  # type: ignore
             username=uuid.uuid4().hex,
             is_installer_user=True,
             password=User.objects.make_random_password(60),  # type: ignore
+            block_dashboard_login=True,
         )
+        self.stdout.write("Installer user has been created")
```
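The installer user above is created non-interactively, so the command is a natural fit for an install or update script. A minimal sketch of invoking it programmatically, assuming the command module is registered under the name `create_installer_user` (the file name is not shown in this view):

```python
# Sketch: run the installer-user command from Python instead of the shell.
# Assumes the command module is named create_installer_user.py; adjust if not.
import django

django.setup()  # requires DJANGO_SETTINGS_MODULE to point at this project's settings

from django.core.management import call_command

# Safe to call repeatedly: the command returns early if an installer user already exists.
call_command("create_installer_user")
```

Because the new `block_dashboard_login=True` flag is set, this account can authenticate for installs but is rejected by the dashboard login views changed later in this diff.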
### Import-order cleanups (accounts management commands and a migration)

```diff
@@ -1,9 +1,8 @@
 import subprocess

 import pyotp
-from django.core.management.base import BaseCommand
-
 from accounts.models import User
+from django.core.management.base import BaseCommand


 class Command(BaseCommand):
```

```diff
@@ -2,9 +2,8 @@ import os
 import subprocess

 import pyotp
-from django.core.management.base import BaseCommand
-
 from accounts.models import User
+from django.core.management.base import BaseCommand


 class Command(BaseCommand):
```

```diff
@@ -1,5 +1,5 @@
-from django.core.management.base import BaseCommand
 from accounts.models import User
+from django.core.management.base import BaseCommand


 class Command(BaseCommand):
```

```diff
@@ -1,7 +1,7 @@
 # Generated by Django 3.2.1 on 2021-05-11 02:33

-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models


 class Migration(migrations.Migration):
```
|||||||
@@ -0,0 +1,34 @@
|
|||||||
|
# Generated by Django 3.2.6 on 2021-09-01 12:47
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('accounts', '0025_auto_20210721_0424'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='APIKey',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('created_by', models.CharField(blank=True, max_length=100, null=True)),
|
||||||
|
('created_time', models.DateTimeField(auto_now_add=True, null=True)),
|
||||||
|
('modified_by', models.CharField(blank=True, max_length=100, null=True)),
|
||||||
|
('modified_time', models.DateTimeField(auto_now=True, null=True)),
|
||||||
|
('name', models.CharField(max_length=25, unique=True)),
|
||||||
|
('key', models.CharField(blank=True, max_length=48, unique=True)),
|
||||||
|
('expiration', models.DateTimeField(blank=True, default=None, null=True)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_manage_api_keys',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
# Generated by Django 3.2.6 on 2021-09-03 00:54
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('accounts', '0026_auto_20210901_1247'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='apikey',
|
||||||
|
name='user',
|
||||||
|
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_key', to='accounts.user'),
|
||||||
|
preserve_default=False,
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='user',
|
||||||
|
name='block_dashboard_login',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
150
api/tacticalrmm/accounts/migrations/0028_auto_20211010_0249.py
Normal file
150
api/tacticalrmm/accounts/migrations/0028_auto_20211010_0249.py
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
# Generated by Django 3.2.6 on 2021-10-10 02:49
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('clients', '0018_auto_20211010_0249'),
|
||||||
|
('accounts', '0027_auto_20210903_0054'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_accounts',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_agent_history',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_agents',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_alerts',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_api_keys',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_automation_policies',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_autotasks',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_checks',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_clients',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_deployments',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_notes',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_pendingactions',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_roles',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_scripts',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_sites',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_software',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_ping_agents',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_recover_agents',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_view_clients',
|
||||||
|
field=models.ManyToManyField(blank=True, related_name='role_clients', to='clients.Client'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_view_sites',
|
||||||
|
field=models.ManyToManyField(blank=True, related_name='role_sites', to='clients.Site'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='apikey',
|
||||||
|
name='created_by',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='apikey',
|
||||||
|
name='modified_by',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='role',
|
||||||
|
name='created_by',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='role',
|
||||||
|
name='modified_by',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='user',
|
||||||
|
name='created_by',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='user',
|
||||||
|
name='modified_by',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='user',
|
||||||
|
name='role',
|
||||||
|
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='users', to='accounts.role'),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
# Generated by Django 3.2.6 on 2021-10-22 22:45
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('accounts', '0028_auto_20211010_0249'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_list_alerttemplates',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_manage_alerttemplates',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_run_urlactions',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.2.6 on 2021-11-04 02:21
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('accounts', '0029_auto_20211022_2245'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_manage_customfields',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='role',
|
||||||
|
name='can_view_customfields',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
||||||
18
api/tacticalrmm/accounts/migrations/0031_user_date_format.py
Normal file
18
api/tacticalrmm/accounts/migrations/0031_user_date_format.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Generated by Django 3.2.12 on 2022-04-02 15:57
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('accounts', '0030_auto_20211104_0221'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='user',
|
||||||
|
name='date_format',
|
||||||
|
field=models.CharField(blank=True, max_length=30, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -1,8 +1,13 @@
|
|||||||
from django.contrib.auth.models import AbstractUser
|
from django.contrib.auth.models import AbstractUser
|
||||||
from django.db import models
|
from django.db import models
|
||||||
|
from django.db.models.fields import CharField, DateTimeField
|
||||||
|
from django.core.cache import cache
|
||||||
from logs.models import BaseAuditModel
|
from logs.models import BaseAuditModel
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from tacticalrmm.constants import ROLE_CACHE_PREFIX
|
||||||
|
|
||||||
AGENT_DBLCLICK_CHOICES = [
|
AGENT_DBLCLICK_CHOICES = [
|
||||||
("editagent", "Edit Agent"),
|
("editagent", "Edit Agent"),
|
||||||
("takecontrol", "Take Control"),
|
("takecontrol", "Take Control"),
|
||||||
@@ -24,6 +29,7 @@ CLIENT_TREE_SORT_CHOICES = [
|
|||||||
|
|
||||||
class User(AbstractUser, BaseAuditModel):
|
class User(AbstractUser, BaseAuditModel):
|
||||||
is_active = models.BooleanField(default=True)
|
is_active = models.BooleanField(default=True)
|
||||||
|
block_dashboard_login = models.BooleanField(default=False)
|
||||||
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
||||||
dark_mode = models.BooleanField(default=True)
|
dark_mode = models.BooleanField(default=True)
|
||||||
show_community_scripts = models.BooleanField(default=True)
|
show_community_scripts = models.BooleanField(default=True)
|
||||||
@@ -47,6 +53,7 @@ class User(AbstractUser, BaseAuditModel):
|
|||||||
client_tree_splitter = models.PositiveIntegerField(default=11)
|
client_tree_splitter = models.PositiveIntegerField(default=11)
|
||||||
loading_bar_color = models.CharField(max_length=255, default="red")
|
loading_bar_color = models.CharField(max_length=255, default="red")
|
||||||
clear_search_when_switching = models.BooleanField(default=True)
|
clear_search_when_switching = models.BooleanField(default=True)
|
||||||
|
date_format = models.CharField(max_length=30, blank=True, null=True)
|
||||||
is_installer_user = models.BooleanField(default=False)
|
is_installer_user = models.BooleanField(default=False)
|
||||||
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
|
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
|
||||||
|
|
||||||
@@ -62,7 +69,7 @@ class User(AbstractUser, BaseAuditModel):
|
|||||||
"accounts.Role",
|
"accounts.Role",
|
||||||
null=True,
|
null=True,
|
||||||
blank=True,
|
blank=True,
|
||||||
related_name="roles",
|
related_name="users",
|
||||||
on_delete=models.SET_NULL,
|
on_delete=models.SET_NULL,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -73,12 +80,31 @@ class User(AbstractUser, BaseAuditModel):
|
|||||||
|
|
||||||
return UserSerializer(user).data
|
return UserSerializer(user).data
|
||||||
|
|
||||||
|
def get_and_set_role_cache(self) -> "Optional[Role]":
|
||||||
|
role = cache.get(f"{ROLE_CACHE_PREFIX}{self.role}")
|
||||||
|
|
||||||
|
if role and isinstance(role, Role):
|
||||||
|
return role
|
||||||
|
elif not role and not self.role:
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
models.prefetch_related_objects(
|
||||||
|
[self.role],
|
||||||
|
"can_view_clients",
|
||||||
|
"can_view_sites",
|
||||||
|
)
|
||||||
|
|
||||||
|
cache.set(f"{ROLE_CACHE_PREFIX}{self.role}", self.role, 600)
|
||||||
|
return self.role
|
||||||
|
|
||||||
|
|
||||||
class Role(BaseAuditModel):
|
class Role(BaseAuditModel):
|
||||||
name = models.CharField(max_length=255, unique=True)
|
name = models.CharField(max_length=255, unique=True)
|
||||||
is_superuser = models.BooleanField(default=False)
|
is_superuser = models.BooleanField(default=False)
|
||||||
|
|
||||||
# agents
|
# agents
|
||||||
|
can_list_agents = models.BooleanField(default=False)
|
||||||
|
can_ping_agents = models.BooleanField(default=False)
|
||||||
can_use_mesh = models.BooleanField(default=False)
|
can_use_mesh = models.BooleanField(default=False)
|
||||||
can_uninstall_agents = models.BooleanField(default=False)
|
can_uninstall_agents = models.BooleanField(default=False)
|
||||||
can_update_agents = models.BooleanField(default=False)
|
can_update_agents = models.BooleanField(default=False)
|
||||||
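The new `get_and_set_role_cache` helper above memoizes a user's fully prefetched `Role` for 600 seconds, keyed on the role name, so repeated permission checks avoid re-querying the `can_view_clients`/`can_view_sites` relations. A minimal sketch of how a permission helper could lean on it; the real `_has_perm` used elsewhere in this diff is not shown here, so this is an illustrative stand-in, not the project's implementation:

```python
# Hypothetical helper built on the cached role; not the project's _has_perm.
def role_allows(user, perm_name: str) -> bool:
    # Superusers bypass role checks entirely.
    if user.is_superuser:
        return True

    # Cached on first access, refreshed at most every 600 seconds,
    # and invalidated whenever Role.save() deletes the cache key.
    role = user.get_and_set_role_cache()
    if role is None:
        return False

    # Role-wide superuser flag first, then the specific boolean permission field.
    return role.is_superuser or getattr(role, perm_name, False)


# Usage sketch:
# role_allows(request.user, "can_list_api_keys")
```

Note that the cache key is derived from `str(self.role)` (the role's name), which is why `Role.save()` further down clears `ROLE_CACHE_PREFIX` plus the name before writing.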
@@ -90,57 +116,93 @@ class Role(BaseAuditModel):
|
|||||||
can_install_agents = models.BooleanField(default=False)
|
can_install_agents = models.BooleanField(default=False)
|
||||||
can_run_scripts = models.BooleanField(default=False)
|
can_run_scripts = models.BooleanField(default=False)
|
||||||
can_run_bulk = models.BooleanField(default=False)
|
can_run_bulk = models.BooleanField(default=False)
|
||||||
|
can_recover_agents = models.BooleanField(default=False)
|
||||||
|
can_list_agent_history = models.BooleanField(default=False)
|
||||||
|
|
||||||
# core
|
# core
|
||||||
|
can_list_notes = models.BooleanField(default=False)
|
||||||
can_manage_notes = models.BooleanField(default=False)
|
can_manage_notes = models.BooleanField(default=False)
|
||||||
can_view_core_settings = models.BooleanField(default=False)
|
can_view_core_settings = models.BooleanField(default=False)
|
||||||
can_edit_core_settings = models.BooleanField(default=False)
|
can_edit_core_settings = models.BooleanField(default=False)
|
||||||
can_do_server_maint = models.BooleanField(default=False)
|
can_do_server_maint = models.BooleanField(default=False)
|
||||||
can_code_sign = models.BooleanField(default=False)
|
can_code_sign = models.BooleanField(default=False)
|
||||||
|
can_run_urlactions = models.BooleanField(default=False)
|
||||||
|
can_view_customfields = models.BooleanField(default=False)
|
||||||
|
can_manage_customfields = models.BooleanField(default=False)
|
||||||
|
|
||||||
# checks
|
# checks
|
||||||
|
can_list_checks = models.BooleanField(default=False)
|
||||||
can_manage_checks = models.BooleanField(default=False)
|
can_manage_checks = models.BooleanField(default=False)
|
||||||
can_run_checks = models.BooleanField(default=False)
|
can_run_checks = models.BooleanField(default=False)
|
||||||
|
|
||||||
# clients
|
# clients
|
||||||
|
can_list_clients = models.BooleanField(default=False)
|
||||||
can_manage_clients = models.BooleanField(default=False)
|
can_manage_clients = models.BooleanField(default=False)
|
||||||
|
can_list_sites = models.BooleanField(default=False)
|
||||||
can_manage_sites = models.BooleanField(default=False)
|
can_manage_sites = models.BooleanField(default=False)
|
||||||
|
can_list_deployments = models.BooleanField(default=False)
|
||||||
can_manage_deployments = models.BooleanField(default=False)
|
can_manage_deployments = models.BooleanField(default=False)
|
||||||
|
can_view_clients = models.ManyToManyField(
|
||||||
|
"clients.Client", related_name="role_clients", blank=True
|
||||||
|
)
|
||||||
|
can_view_sites = models.ManyToManyField(
|
||||||
|
"clients.Site", related_name="role_sites", blank=True
|
||||||
|
)
|
||||||
|
|
||||||
# automation
|
# automation
|
||||||
|
can_list_automation_policies = models.BooleanField(default=False)
|
||||||
can_manage_automation_policies = models.BooleanField(default=False)
|
can_manage_automation_policies = models.BooleanField(default=False)
|
||||||
|
|
||||||
# automated tasks
|
# automated tasks
|
||||||
|
can_list_autotasks = models.BooleanField(default=False)
|
||||||
can_manage_autotasks = models.BooleanField(default=False)
|
can_manage_autotasks = models.BooleanField(default=False)
|
||||||
can_run_autotasks = models.BooleanField(default=False)
|
can_run_autotasks = models.BooleanField(default=False)
|
||||||
|
|
||||||
# logs
|
# logs
|
||||||
can_view_auditlogs = models.BooleanField(default=False)
|
can_view_auditlogs = models.BooleanField(default=False)
|
||||||
|
can_list_pendingactions = models.BooleanField(default=False)
|
||||||
can_manage_pendingactions = models.BooleanField(default=False)
|
can_manage_pendingactions = models.BooleanField(default=False)
|
||||||
can_view_debuglogs = models.BooleanField(default=False)
|
can_view_debuglogs = models.BooleanField(default=False)
|
||||||
|
|
||||||
# scripts
|
# scripts
|
||||||
|
can_list_scripts = models.BooleanField(default=False)
|
||||||
can_manage_scripts = models.BooleanField(default=False)
|
can_manage_scripts = models.BooleanField(default=False)
|
||||||
|
|
||||||
# alerts
|
# alerts
|
||||||
|
can_list_alerts = models.BooleanField(default=False)
|
||||||
can_manage_alerts = models.BooleanField(default=False)
|
can_manage_alerts = models.BooleanField(default=False)
|
||||||
|
can_list_alerttemplates = models.BooleanField(default=False)
|
||||||
|
can_manage_alerttemplates = models.BooleanField(default=False)
|
||||||
|
|
||||||
# win services
|
# win services
|
||||||
can_manage_winsvcs = models.BooleanField(default=False)
|
can_manage_winsvcs = models.BooleanField(default=False)
|
||||||
|
|
||||||
# software
|
# software
|
||||||
|
can_list_software = models.BooleanField(default=False)
|
||||||
can_manage_software = models.BooleanField(default=False)
|
can_manage_software = models.BooleanField(default=False)
|
||||||
|
|
||||||
# windows updates
|
# windows updates
|
||||||
can_manage_winupdates = models.BooleanField(default=False)
|
can_manage_winupdates = models.BooleanField(default=False)
|
||||||
|
|
||||||
# accounts
|
# accounts
|
||||||
|
can_list_accounts = models.BooleanField(default=False)
|
||||||
can_manage_accounts = models.BooleanField(default=False)
|
can_manage_accounts = models.BooleanField(default=False)
|
||||||
|
can_list_roles = models.BooleanField(default=False)
|
||||||
can_manage_roles = models.BooleanField(default=False)
|
can_manage_roles = models.BooleanField(default=False)
|
||||||
|
|
||||||
|
# authentication
|
||||||
|
can_list_api_keys = models.BooleanField(default=False)
|
||||||
|
can_manage_api_keys = models.BooleanField(default=False)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.name
|
return self.name
|
||||||
|
|
||||||
|
def save(self, *args, **kwargs) -> None:
|
||||||
|
|
||||||
|
# delete cache on save
|
||||||
|
cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
|
||||||
|
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def serialize(role):
|
def serialize(role):
|
||||||
# serializes the agent and returns json
|
# serializes the agent and returns json
|
||||||
@@ -148,42 +210,19 @@ class Role(BaseAuditModel):
|
|||||||
|
|
||||||
return RoleAuditSerializer(role).data
|
return RoleAuditSerializer(role).data
|
||||||
|
|
||||||
|
|
||||||
|
class APIKey(BaseAuditModel):
|
||||||
|
name = CharField(unique=True, max_length=25)
|
||||||
|
key = CharField(unique=True, blank=True, max_length=48)
|
||||||
|
expiration = DateTimeField(blank=True, null=True, default=None)
|
||||||
|
user = models.ForeignKey(
|
||||||
|
"accounts.User",
|
||||||
|
related_name="api_key",
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def perms():
|
def serialize(apikey):
|
||||||
return [
|
from .serializers import APIKeyAuditSerializer
|
||||||
"is_superuser",
|
|
||||||
"can_use_mesh",
|
return APIKeyAuditSerializer(apikey).data
|
||||||
"can_uninstall_agents",
|
|
||||||
"can_update_agents",
|
|
||||||
"can_edit_agent",
|
|
||||||
"can_manage_procs",
|
|
||||||
"can_view_eventlogs",
|
|
||||||
"can_send_cmd",
|
|
||||||
"can_reboot_agents",
|
|
||||||
"can_install_agents",
|
|
||||||
"can_run_scripts",
|
|
||||||
"can_run_bulk",
|
|
||||||
"can_manage_notes",
|
|
||||||
"can_view_core_settings",
|
|
||||||
"can_edit_core_settings",
|
|
||||||
"can_do_server_maint",
|
|
||||||
"can_code_sign",
|
|
||||||
"can_manage_checks",
|
|
||||||
"can_run_checks",
|
|
||||||
"can_manage_clients",
|
|
||||||
"can_manage_sites",
|
|
||||||
"can_manage_deployments",
|
|
||||||
"can_manage_automation_policies",
|
|
||||||
"can_manage_autotasks",
|
|
||||||
"can_run_autotasks",
|
|
||||||
"can_view_auditlogs",
|
|
||||||
"can_manage_pendingactions",
|
|
||||||
"can_view_debuglogs",
|
|
||||||
"can_manage_scripts",
|
|
||||||
"can_manage_alerts",
|
|
||||||
"can_manage_winsvcs",
|
|
||||||
"can_manage_software",
|
|
||||||
"can_manage_winupdates",
|
|
||||||
"can_manage_accounts",
|
|
||||||
"can_manage_roles",
|
|
||||||
]
|
|
||||||
|
|||||||
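The accounts model changes above drop the hard-coded `Role.perms()` list and add an `APIKey` model tied to a user. A small sketch of creating a key directly through the ORM, mirroring what the API view later in this diff does with `get_random_string` (the username and key name below are illustrative only):

```python
# Sketch: mint an API key for an existing user from a Django shell.
from django.utils.crypto import get_random_string

from accounts.models import APIKey, User

user = User.objects.get(username="api_user")  # any existing user

api_key = APIKey.objects.create(
    name="Automation token",                   # unique, max 25 chars
    key=get_random_string(length=32).upper(),  # same scheme as the view; field allows up to 48
    user=user,                                 # related_name="api_key" on User
    expiration=None,                           # optional expiry
)
print(api_key.key)  # send this value in the X-API-KEY request header
```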
@@ -4,16 +4,40 @@ from tacticalrmm.permissions import _has_perm
|
|||||||
|
|
||||||
|
|
||||||
class AccountsPerms(permissions.BasePermission):
|
class AccountsPerms(permissions.BasePermission):
|
||||||
def has_permission(self, r, view):
|
def has_permission(self, r, view) -> bool:
|
||||||
if r.method == "GET":
|
if r.method == "GET":
|
||||||
|
return _has_perm(r, "can_list_accounts")
|
||||||
|
else:
|
||||||
|
|
||||||
|
# allow users to reset their own password/2fa see issue #686
|
||||||
|
base_path = "/accounts/users/"
|
||||||
|
paths = ["reset/", "reset_totp/"]
|
||||||
|
|
||||||
|
if r.path in [base_path + i for i in paths]:
|
||||||
|
from accounts.models import User
|
||||||
|
|
||||||
|
try:
|
||||||
|
user = User.objects.get(pk=r.data["id"])
|
||||||
|
except User.DoesNotExist:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
if user == r.user:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
return _has_perm(r, "can_manage_accounts")
|
return _has_perm(r, "can_manage_accounts")
|
||||||
|
|
||||||
|
|
||||||
class RolesPerms(permissions.BasePermission):
|
class RolesPerms(permissions.BasePermission):
|
||||||
def has_permission(self, r, view):
|
def has_permission(self, r, view) -> bool:
|
||||||
if r.method == "GET":
|
if r.method == "GET":
|
||||||
return True
|
return _has_perm(r, "can_list_roles")
|
||||||
|
else:
|
||||||
return _has_perm(r, "can_manage_roles")
|
return _has_perm(r, "can_manage_roles")
|
||||||
|
|
||||||
|
|
||||||
|
class APIKeyPerms(permissions.BasePermission):
|
||||||
|
def has_permission(self, r, view) -> bool:
|
||||||
|
if r.method == "GET":
|
||||||
|
return _has_perm(r, "can_list_api_keys")
|
||||||
|
|
||||||
|
return _has_perm(r, "can_manage_api_keys")
|
||||||
|
|||||||
@@ -1,7 +1,11 @@
|
|||||||
import pyotp
|
import pyotp
|
||||||
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
from rest_framework.serializers import (
|
||||||
|
ModelSerializer,
|
||||||
|
ReadOnlyField,
|
||||||
|
SerializerMethodField,
|
||||||
|
)
|
||||||
|
|
||||||
from .models import User, Role
|
from .models import APIKey, Role, User
|
||||||
|
|
||||||
|
|
||||||
class UserUISerializer(ModelSerializer):
|
class UserUISerializer(ModelSerializer):
|
||||||
@@ -17,6 +21,8 @@ class UserUISerializer(ModelSerializer):
|
|||||||
"client_tree_splitter",
|
"client_tree_splitter",
|
||||||
"loading_bar_color",
|
"loading_bar_color",
|
||||||
"clear_search_when_switching",
|
"clear_search_when_switching",
|
||||||
|
"block_dashboard_login",
|
||||||
|
"date_format",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -33,6 +39,8 @@ class UserSerializer(ModelSerializer):
|
|||||||
"last_login",
|
"last_login",
|
||||||
"last_login_ip",
|
"last_login_ip",
|
||||||
"role",
|
"role",
|
||||||
|
"block_dashboard_login",
|
||||||
|
"date_format",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -55,12 +63,38 @@ class TOTPSetupSerializer(ModelSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class RoleSerializer(ModelSerializer):
|
class RoleSerializer(ModelSerializer):
|
||||||
|
user_count = SerializerMethodField()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Role
|
model = Role
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
|
||||||
|
def get_user_count(self, obj):
|
||||||
|
return obj.users.count()
|
||||||
|
|
||||||
|
|
||||||
class RoleAuditSerializer(ModelSerializer):
|
class RoleAuditSerializer(ModelSerializer):
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Role
|
model = Role
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class APIKeySerializer(ModelSerializer):
|
||||||
|
|
||||||
|
username = ReadOnlyField(source="user.username")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = APIKey
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class APIKeyAuditSerializer(ModelSerializer):
|
||||||
|
username = ReadOnlyField(source="user.username")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = APIKey
|
||||||
|
fields = [
|
||||||
|
"name",
|
||||||
|
"username",
|
||||||
|
"expiration",
|
||||||
|
]
|
||||||
|
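The `user_count` field on `RoleSerializer` above works because of the `related_name` change in the model diff earlier: the reverse accessor of the `User.role` foreign key is now `users` instead of `roles`. A tiny sketch of the relationships the serializer relies on (the role name is an example only):

```python
# Sketch: the reverse accessor RoleSerializer.get_user_count() depends on.
from accounts.models import Role

role = Role.objects.get(name="Technicians")  # example role name
print(role.users.count())           # what get_user_count() returns
print(role.can_view_clients.all())  # M2M added in this diff, restricting visible clients
```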
|||||||
@@ -1,14 +1,16 @@
|
|||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from accounts.models import APIKey, User
|
||||||
|
from accounts.serializers import APIKeySerializer
|
||||||
from django.test import override_settings
|
from django.test import override_settings
|
||||||
|
from model_bakery import baker, seq
|
||||||
|
|
||||||
from accounts.models import User
|
|
||||||
from tacticalrmm.test import TacticalTestCase
|
from tacticalrmm.test import TacticalTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestAccounts(TacticalTestCase):
|
class TestAccounts(TacticalTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.client_setup()
|
self.setup_client()
|
||||||
self.bob = User(username="bob")
|
self.bob = User(username="bob")
|
||||||
self.bob.set_password("hunter2")
|
self.bob.set_password("hunter2")
|
||||||
self.bob.save()
|
self.bob.save()
|
||||||
@@ -25,12 +27,12 @@ class TestAccounts(TacticalTestCase):
|
|||||||
data = {"username": "bob", "password": "a3asdsa2314"}
|
data = {"username": "bob", "password": "a3asdsa2314"}
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
self.assertEqual(r.data, "bad credentials")
|
self.assertEqual(r.data, "Bad credentials")
|
||||||
|
|
||||||
data = {"username": "billy", "password": "hunter2"}
|
data = {"username": "billy", "password": "hunter2"}
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
self.assertEqual(r.data, "bad credentials")
|
self.assertEqual(r.data, "Bad credentials")
|
||||||
|
|
||||||
self.bob.totp_key = "AB5RI6YPFTZAS52G"
|
self.bob.totp_key = "AB5RI6YPFTZAS52G"
|
||||||
self.bob.save()
|
self.bob.save()
|
||||||
@@ -39,6 +41,12 @@ class TestAccounts(TacticalTestCase):
|
|||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.data, "ok")
|
self.assertEqual(r.data, "ok")
|
||||||
|
|
||||||
|
# test user set to block dashboard logins
|
||||||
|
self.bob.block_dashboard_login = True
|
||||||
|
self.bob.save()
|
||||||
|
r = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(r.status_code, 400)
|
||||||
|
|
||||||
@patch("pyotp.TOTP.verify")
|
@patch("pyotp.TOTP.verify")
|
||||||
def test_login_view(self, mock_verify):
|
def test_login_view(self, mock_verify):
|
||||||
url = "/login/"
|
url = "/login/"
|
||||||
@@ -53,7 +61,7 @@ class TestAccounts(TacticalTestCase):
|
|||||||
mock_verify.return_value = False
|
mock_verify.return_value = False
|
||||||
r = self.client.post(url, data, format="json")
|
r = self.client.post(url, data, format="json")
|
||||||
self.assertEqual(r.status_code, 400)
|
self.assertEqual(r.status_code, 400)
|
||||||
self.assertEqual(r.data, "bad credentials")
|
self.assertEqual(r.data, "Bad credentials")
|
||||||
|
|
||||||
mock_verify.return_value = True
|
mock_verify.return_value = True
|
||||||
data = {"username": "bob", "password": "asd234234asd", "twofactor": "123456"}
|
data = {"username": "bob", "password": "asd234234asd", "twofactor": "123456"}
|
||||||
@@ -288,6 +296,68 @@ class TestUserAction(TacticalTestCase):
|
|||||||
self.check_not_authenticated("patch", url)
|
self.check_not_authenticated("patch", url)
|
||||||
|
|
||||||
|
|
||||||
|
class TestAPIKeyViews(TacticalTestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.setup_coresettings()
|
||||||
|
self.authenticate()
|
||||||
|
|
||||||
|
def test_get_api_keys(self):
|
||||||
|
url = "/accounts/apikeys/"
|
||||||
|
apikeys = baker.make("accounts.APIKey", key=seq("APIKEY"), _quantity=3)
|
||||||
|
|
||||||
|
serializer = APIKeySerializer(apikeys, many=True)
|
||||||
|
resp = self.client.get(url, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertEqual(serializer.data, resp.data)
|
||||||
|
|
||||||
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
|
def test_add_api_keys(self):
|
||||||
|
url = "/accounts/apikeys/"
|
||||||
|
|
||||||
|
user = baker.make("accounts.User")
|
||||||
|
data = {"name": "Name", "user": user.id, "expiration": None}
|
||||||
|
|
||||||
|
resp = self.client.post(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
self.assertTrue(APIKey.objects.filter(name="Name").exists())
|
||||||
|
self.assertTrue(APIKey.objects.get(name="Name").key)
|
||||||
|
|
||||||
|
self.check_not_authenticated("post", url)
|
||||||
|
|
||||||
|
def test_modify_api_key(self):
|
||||||
|
# test a call where api key doesn't exist
|
||||||
|
resp = self.client.put("/accounts/apikeys/500/", format="json")
|
||||||
|
self.assertEqual(resp.status_code, 404)
|
||||||
|
|
||||||
|
apikey = baker.make("accounts.APIKey", name="Test")
|
||||||
|
url = f"/accounts/apikeys/{apikey.pk}/"
|
||||||
|
|
||||||
|
data = {"name": "New Name"}
|
||||||
|
|
||||||
|
resp = self.client.put(url, data, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
apikey = APIKey.objects.get(pk=apikey.pk)
|
||||||
|
self.assertEquals(apikey.name, "New Name")
|
||||||
|
|
||||||
|
self.check_not_authenticated("put", url)
|
||||||
|
|
||||||
|
def test_delete_api_key(self):
|
||||||
|
# test a call where api key doesn't exist
|
||||||
|
resp = self.client.delete("/accounts/apikeys/500/", format="json")
|
||||||
|
self.assertEqual(resp.status_code, 404)
|
||||||
|
|
||||||
|
# test delete api key
|
||||||
|
apikey = baker.make("accounts.APIKey")
|
||||||
|
url = f"/accounts/apikeys/{apikey.pk}/"
|
||||||
|
resp = self.client.delete(url, format="json")
|
||||||
|
self.assertEqual(resp.status_code, 200)
|
||||||
|
|
||||||
|
self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists())
|
||||||
|
|
||||||
|
self.check_not_authenticated("delete", url)
|
||||||
|
|
||||||
|
|
||||||
class TestTOTPSetup(TacticalTestCase):
|
class TestTOTPSetup(TacticalTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.authenticate()
|
self.authenticate()
|
||||||
@@ -313,3 +383,29 @@ class TestTOTPSetup(TacticalTestCase):
|
|||||||
r = self.client.post(url)
|
r = self.client.post(url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.data, "totp token already set")
|
self.assertEqual(r.data, "totp token already set")
|
||||||
|
|
||||||
|
|
||||||
|
class TestAPIAuthentication(TacticalTestCase):
|
||||||
|
def setUp(self):
|
||||||
|
# create User and associate to API Key
|
||||||
|
self.user = User.objects.create(username="api_user", is_superuser=True)
|
||||||
|
self.api_key = APIKey.objects.create(
|
||||||
|
name="Test Token", key="123456", user=self.user
|
||||||
|
)
|
||||||
|
|
||||||
|
self.setup_client()
|
||||||
|
|
||||||
|
def test_api_auth(self):
|
||||||
|
url = "/clients/"
|
||||||
|
# auth should fail if no header set
|
||||||
|
self.check_not_authenticated("get", url)
|
||||||
|
|
||||||
|
# invalid api key in header should return code 400
|
||||||
|
self.client.credentials(HTTP_X_API_KEY="000000")
|
||||||
|
r = self.client.get(url, format="json")
|
||||||
|
self.assertEqual(r.status_code, 401)
|
||||||
|
|
||||||
|
# valid api key in header should return code 200
|
||||||
|
self.client.credentials(HTTP_X_API_KEY="123456")
|
||||||
|
r = self.client.get(url, format="json")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
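`TestAPIAuthentication` above exercises header-based auth (`X-API-KEY`), but the authentication backend itself is not part of this excerpt. A rough sketch of what such a DRF authentication class typically looks like, offered only as an illustration of the pattern the test implies, not as the project's actual implementation:

```python
# Illustrative only: a DRF authentication class keyed on the X-API-KEY header.
from django.utils import timezone
from rest_framework import authentication, exceptions

from accounts.models import APIKey


class APIKeyAuthentication(authentication.BaseAuthentication):
    def authenticate(self, request):
        token = request.META.get("HTTP_X_API_KEY")
        if not token:
            return None  # fall through to other authenticators / unauthenticated

        try:
            api_key = APIKey.objects.select_related("user").get(key=token)
        except APIKey.DoesNotExist:
            raise exceptions.AuthenticationFailed("Invalid API Key")

        # Honor the optional expiration field on the model.
        if api_key.expiration and api_key.expiration < timezone.now():
            raise exceptions.AuthenticationFailed("API Key has expired")

        return (api_key.user, token)
```

The test's expectations line up with this shape: no header leaves the request unauthenticated, a bogus key is rejected, and a valid key authenticates the request as the key's user.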
### Accounts URL routes (roles and API keys)

```diff
@@ -9,7 +9,8 @@ urlpatterns = [
     path("users/reset_totp/", views.UserActions.as_view()),
     path("users/setup_totp/", views.TOTPSetup.as_view()),
     path("users/ui/", views.UserUI.as_view()),
-    path("permslist/", views.PermsList.as_view()),
     path("roles/", views.GetAddRoles.as_view()),
-    path("<int:pk>/role/", views.GetUpdateDeleteRole.as_view()),
+    path("roles/<int:pk>/", views.GetUpdateDeleteRole.as_view()),
+    path("apikeys/", views.GetAddAPIKeys.as_view()),
+    path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
 ]
```
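With these routes in place, key management lives under the accounts prefix. A small usage sketch with `requests`, assuming the API is reachable at `https://api.example.com` and authenticated with the `X-API-KEY` header shown in the tests above (the host, key value, and user id are placeholders):

```python
# Sketch: exercise the new endpoints; base URL and credentials are placeholders.
import requests

BASE = "https://api.example.com"

# Key-based authentication, as exercised by TestAPIAuthentication above.
headers = {"X-API-KEY": "YOURGENERATED32CHARKEY"}
clients = requests.get(f"{BASE}/clients/", headers=headers, timeout=30)
print(clients.status_code)

# Listing and managing keys requires can_list_api_keys / can_manage_api_keys.
keys = requests.get(f"{BASE}/accounts/apikeys/", headers=headers, timeout=30)
new_key = requests.post(
    f"{BASE}/accounts/apikeys/",
    headers=headers,
    json={"name": "CI token", "user": 1, "expiration": None},
    timeout=30,
)
```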
|||||||
@@ -6,16 +6,17 @@ from django.shortcuts import get_object_or_404
|
|||||||
from ipware import get_client_ip
|
from ipware import get_client_ip
|
||||||
from knox.views import LoginView as KnoxLoginView
|
from knox.views import LoginView as KnoxLoginView
|
||||||
from logs.models import AuditLog
|
from logs.models import AuditLog
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from tacticalrmm.utils import notify_error
|
from tacticalrmm.utils import notify_error
|
||||||
|
|
||||||
from .models import Role, User
|
from .models import APIKey, Role, User
|
||||||
from .permissions import AccountsPerms, RolesPerms
|
from .permissions import AccountsPerms, APIKeyPerms, RolesPerms
|
||||||
from .serializers import (
|
from .serializers import (
|
||||||
|
APIKeySerializer,
|
||||||
RoleSerializer,
|
RoleSerializer,
|
||||||
TOTPSetupSerializer,
|
TOTPSetupSerializer,
|
||||||
UserSerializer,
|
UserSerializer,
|
||||||
@@ -24,11 +25,15 @@ from .serializers import (
|
|||||||
|
|
||||||
|
|
||||||
def _is_root_user(request, user) -> bool:
|
def _is_root_user(request, user) -> bool:
|
||||||
return (
|
root = (
|
||||||
hasattr(settings, "ROOT_USER")
|
hasattr(settings, "ROOT_USER")
|
||||||
and request.user != user
|
and request.user != user
|
||||||
and user.username == settings.ROOT_USER
|
and user.username == settings.ROOT_USER
|
||||||
)
|
)
|
||||||
|
demo = (
|
||||||
|
getattr(settings, "DEMO", False) and request.user.username == settings.ROOT_USER
|
||||||
|
)
|
||||||
|
return root or demo
|
||||||
|
|
||||||
|
|
||||||
class CheckCreds(KnoxLoginView):
|
class CheckCreds(KnoxLoginView):
|
||||||
@@ -43,10 +48,13 @@ class CheckCreds(KnoxLoginView):
|
|||||||
AuditLog.audit_user_failed_login(
|
AuditLog.audit_user_failed_login(
|
||||||
request.data["username"], debug_info={"ip": request._client_ip}
|
request.data["username"], debug_info={"ip": request._client_ip}
|
||||||
)
|
)
|
||||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
return notify_error("Bad credentials")
|
||||||
|
|
||||||
user = serializer.validated_data["user"]
|
user = serializer.validated_data["user"]
|
||||||
|
|
||||||
|
if user.block_dashboard_login:
|
||||||
|
return notify_error("Bad credentials")
|
||||||
|
|
||||||
# if totp token not set modify response to notify frontend
|
# if totp token not set modify response to notify frontend
|
||||||
if not user.totp_key:
|
if not user.totp_key:
|
||||||
login(request, user)
|
login(request, user)
|
||||||
@@ -68,11 +76,16 @@ class LoginView(KnoxLoginView):
|
|||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
user = serializer.validated_data["user"]
|
user = serializer.validated_data["user"]
|
||||||
|
|
||||||
|
if user.block_dashboard_login:
|
||||||
|
return notify_error("Bad credentials")
|
||||||
|
|
||||||
token = request.data["twofactor"]
|
token = request.data["twofactor"]
|
||||||
totp = pyotp.TOTP(user.totp_key)
|
totp = pyotp.TOTP(user.totp_key)
|
||||||
|
|
||||||
if settings.DEBUG and token == "sekret":
|
if settings.DEBUG and token == "sekret":
|
||||||
valid = True
|
valid = True
|
||||||
|
elif getattr(settings, "DEMO", False):
|
||||||
|
valid = True
|
||||||
elif totp.verify(token, valid_window=10):
|
elif totp.verify(token, valid_window=10):
|
||||||
valid = True
|
valid = True
|
||||||
|
|
||||||
@@ -92,7 +105,7 @@ class LoginView(KnoxLoginView):
|
|||||||
AuditLog.audit_user_failed_twofactor(
|
AuditLog.audit_user_failed_twofactor(
|
||||||
request.data["username"], debug_info={"ip": request._client_ip}
|
request.data["username"], debug_info={"ip": request._client_ip}
|
||||||
)
|
)
|
||||||
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
|
return notify_error("Bad credentials")
|
||||||
|
|
||||||
|
|
||||||
class GetAddUsers(APIView):
|
class GetAddUsers(APIView):
|
||||||
@@ -123,7 +136,9 @@ class GetAddUsers(APIView):
|
|||||||
f"ERROR: User {request.data['username']} already exists!"
|
f"ERROR: User {request.data['username']} already exists!"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if "first_name" in request.data.keys():
|
||||||
user.first_name = request.data["first_name"]
|
user.first_name = request.data["first_name"]
|
||||||
|
if "last_name" in request.data.keys():
|
||||||
user.last_name = request.data["last_name"]
|
user.last_name = request.data["last_name"]
|
||||||
if "role" in request.data.keys() and isinstance(request.data["role"], int):
|
if "role" in request.data.keys() and isinstance(request.data["role"], int):
|
||||||
role = get_object_or_404(Role, pk=request.data["role"])
|
role = get_object_or_404(Role, pk=request.data["role"])
|
||||||
@@ -215,11 +230,6 @@ class UserUI(APIView):
|
|||||||
return Response("ok")
|
return Response("ok")
|
||||||
|
|
||||||
|
|
||||||
class PermsList(APIView):
|
|
||||||
def get(self, request):
|
|
||||||
return Response(Role.perms())
|
|
||||||
|
|
||||||
|
|
||||||
class GetAddRoles(APIView):
|
class GetAddRoles(APIView):
|
||||||
permission_classes = [IsAuthenticated, RolesPerms]
|
permission_classes = [IsAuthenticated, RolesPerms]
|
||||||
|
|
||||||
@@ -231,7 +241,7 @@ class GetAddRoles(APIView):
|
|||||||
serializer = RoleSerializer(data=request.data)
|
serializer = RoleSerializer(data=request.data)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
return Response("ok")
|
return Response("Role was added")
|
||||||
|
|
||||||
|
|
||||||
class GetUpdateDeleteRole(APIView):
|
class GetUpdateDeleteRole(APIView):
|
||||||
@@ -246,9 +256,48 @@ class GetUpdateDeleteRole(APIView):
|
|||||||
serializer = RoleSerializer(instance=role, data=request.data)
|
serializer = RoleSerializer(instance=role, data=request.data)
|
||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
return Response("ok")
|
return Response("Role was edited")
|
||||||
|
|
||||||
def delete(self, request, pk):
|
def delete(self, request, pk):
|
||||||
role = get_object_or_404(Role, pk=pk)
|
role = get_object_or_404(Role, pk=pk)
|
||||||
role.delete()
|
role.delete()
|
||||||
return Response("ok")
|
return Response("Role was removed")
|
||||||
|
|
||||||
|
|
||||||
|
class GetAddAPIKeys(APIView):
|
||||||
|
permission_classes = [IsAuthenticated, APIKeyPerms]
|
||||||
|
|
||||||
|
def get(self, request):
|
||||||
|
apikeys = APIKey.objects.all()
|
||||||
|
return Response(APIKeySerializer(apikeys, many=True).data)
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
# generate a random API Key
|
||||||
|
from django.utils.crypto import get_random_string
|
||||||
|
|
||||||
|
request.data["key"] = get_random_string(length=32).upper()
|
||||||
|
serializer = APIKeySerializer(data=request.data)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
obj = serializer.save()
|
||||||
|
return Response("The API Key was added")
|
||||||
|
|
||||||
|
|
||||||
|
class GetUpdateDeleteAPIKey(APIView):
|
||||||
|
permission_classes = [IsAuthenticated, APIKeyPerms]
|
||||||
|
|
||||||
|
def put(self, request, pk):
|
||||||
|
apikey = get_object_or_404(APIKey, pk=pk)
|
||||||
|
|
||||||
|
# remove API key is present in request data
|
||||||
|
if "key" in request.data.keys():
|
||||||
|
request.data.pop("key")
|
||||||
|
|
||||||
|
serializer = APIKeySerializer(instance=apikey, data=request.data, partial=True)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
serializer.save()
|
||||||
|
return Response("The API Key was edited")
|
||||||
|
|
||||||
|
def delete(self, request, pk):
|
||||||
|
apikey = get_object_or_404(APIKey, pk=pk)
|
||||||
|
apikey.delete()
|
||||||
|
return Response("The API Key was deleted")
|
||||||
|
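Two details of the API key views above are worth calling out: the server, not the client, mints the key value (`get_random_string(length=32).upper()`), and the update handler strips any client-supplied `key`, so an existing token can be renamed or re-dated but never overwritten. A quick sketch of what that generation step produces (illustrative output only):

```python
# Sketch: the key material the POST handler generates before validation.
from django.utils.crypto import get_random_string

key = get_random_string(length=32).upper()
print(len(key), key)  # 32 characters drawn from letters and digits, uppercased
```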
### Agents admin module (RecoveryAction unregistered)

```diff
@@ -1,9 +1,8 @@
 from django.contrib import admin

-from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
+from .models import Agent, AgentCustomField, AgentHistory, Note

 admin.site.register(Agent)
-admin.site.register(RecoveryAction)
 admin.site.register(Note)
 admin.site.register(AgentCustomField)
 admin.site.register(AgentHistory)
```
### Agent test recipe (model_bakery)

```diff
@@ -30,7 +30,9 @@ agent = Recipe(
     hostname="DESKTOP-TEST123",
     version="1.3.0",
     monitoring_type=cycle(["workstation", "server"]),
-    agent_id=seq("asdkj3h4234-1234hg3h4g34-234jjh34|DESKTOP-TEST123"),
+    agent_id=seq(generate_agent_id("DESKTOP-TEST123")),
+    last_seen=djangotime.now() - djangotime.timedelta(days=5),
+    plat="windows",
 )

 server_agent = agent.extend(
```
### Agents management command (import order)

```diff
@@ -1,7 +1,6 @@
-from django.core.management.base import BaseCommand
-
 from agents.models import Agent
 from clients.models import Client, Site
+from django.core.management.base import BaseCommand


 class Command(BaseCommand):
```
|||||||
@@ -0,0 +1,82 @@
|
|||||||
|
import asyncio
|
||||||
|
|
||||||
|
from agents.models import Agent
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
from packaging import version as pyver
|
||||||
|
|
||||||
|
from tacticalrmm.constants import AGENT_DEFER
|
||||||
|
from tacticalrmm.utils import reload_nats
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Delete old agents"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"--days",
|
||||||
|
type=int,
|
||||||
|
help="Delete agents that have not checked in for this many days",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--agentver",
|
||||||
|
type=str,
|
||||||
|
help="Delete agents that equal to or less than this version",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--delete",
|
||||||
|
action="store_true",
|
||||||
|
help="This will delete agents",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **kwargs):
|
||||||
|
days = kwargs["days"]
|
||||||
|
agentver = kwargs["agentver"]
|
||||||
|
delete = kwargs["delete"]
|
||||||
|
|
||||||
|
if not days and not agentver:
|
||||||
|
self.stdout.write(
|
||||||
|
self.style.ERROR("Must have at least one parameter: days or agentver")
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
q = Agent.objects.defer(*AGENT_DEFER)
|
||||||
|
|
||||||
|
agents = []
|
||||||
|
if days:
|
||||||
|
overdue = djangotime.now() - djangotime.timedelta(days=days)
|
||||||
|
agents = [i for i in q if i.last_seen < overdue]
|
||||||
|
|
||||||
|
if agentver:
|
||||||
|
agents = [i for i in q if pyver.parse(i.version) <= pyver.parse(agentver)]
|
||||||
|
|
||||||
|
if not agents:
|
||||||
|
self.stdout.write(self.style.ERROR("No agents matched"))
|
||||||
|
return
|
||||||
|
|
||||||
|
deleted_count = 0
|
||||||
|
for agent in agents:
|
||||||
|
s = f"{agent.hostname} | Version {agent.version} | Last Seen {agent.last_seen} | {agent.client} > {agent.site}"
|
||||||
|
if delete:
|
||||||
|
s = "Deleting " + s
|
||||||
|
self.stdout.write(self.style.SUCCESS(s))
|
||||||
|
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||||
|
try:
|
||||||
|
agent.delete()
|
||||||
|
except Exception as e:
|
||||||
|
err = f"Failed to delete agent {agent.hostname}: {str(e)}"
|
||||||
|
self.stdout.write(self.style.ERROR(err))
|
||||||
|
else:
|
||||||
|
deleted_count += 1
|
||||||
|
else:
|
||||||
|
self.stdout.write(self.style.WARNING(s))
|
||||||
|
|
||||||
|
if delete:
|
||||||
|
reload_nats()
|
||||||
|
self.stdout.write(self.style.SUCCESS(f"Deleted {deleted_count} agents"))
|
||||||
|
else:
|
||||||
|
self.stdout.write(
|
||||||
|
self.style.SUCCESS(
|
||||||
|
"The above agents would be deleted. Run again with --delete to actually delete them."
|
||||||
|
)
|
||||||
|
)
|
||||||
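The new "Delete old agents" command above is a dry run unless `--delete` is passed: it only prints the matching agents, and with the flag it sends the uninstall command over NATS, deletes the database records, and reloads NATS. A usage sketch via `call_command`; the command's module name is not visible in this view, so `delete_old_agents` below is an assumption, substitute the actual file name:

```python
# Sketch: preview, then actually remove, agents not seen in 60 days.
from django.core.management import call_command

# Dry run: prints the "would be deleted" listing only.
call_command("delete_old_agents", days=60)

# Destructive run: uninstalls via NATS, deletes the DB records, reloads NATS.
call_command("delete_old_agents", days=60, delete=True)

# Alternatively, target agents at or below a given version.
call_command("delete_old_agents", agentver="1.5.0", delete=True)
```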
40
api/tacticalrmm/agents/management/commands/demo_cron.py
Normal file
40
api/tacticalrmm/agents/management/commands/demo_cron.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
# import datetime as dt
|
||||||
|
import random
|
||||||
|
|
||||||
|
from agents.models import Agent
|
||||||
|
from core.tasks import cache_db_fields_task, handle_resolved_stuff
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.utils import timezone as djangotime
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "stuff for demo site in cron"
|
||||||
|
|
||||||
|
def handle(self, *args, **kwargs):
|
||||||
|
|
||||||
|
random_dates = []
|
||||||
|
now = djangotime.now()
|
||||||
|
|
||||||
|
for _ in range(20):
|
||||||
|
rand = now - djangotime.timedelta(minutes=random.randint(1, 2))
|
||||||
|
random_dates.append(rand)
|
||||||
|
|
||||||
|
for _ in range(5):
|
||||||
|
rand = now - djangotime.timedelta(minutes=random.randint(10, 20))
|
||||||
|
random_dates.append(rand)
|
||||||
|
|
||||||
|
""" for _ in range(5):
|
||||||
|
rand = djangotime.now() - djangotime.timedelta(hours=random.randint(1, 10))
|
||||||
|
random_dates.append(rand)
|
||||||
|
|
||||||
|
for _ in range(5):
|
||||||
|
rand = djangotime.now() - djangotime.timedelta(days=random.randint(40, 90))
|
||||||
|
random_dates.append(rand) """
|
||||||
|
|
||||||
|
agents = Agent.objects.only("last_seen")
|
||||||
|
for agent in agents:
|
||||||
|
agent.last_seen = random.choice(random_dates)
|
||||||
|
agent.save(update_fields=["last_seen"])
|
||||||
|
|
||||||
|
cache_db_fields_task()
|
||||||
|
handle_resolved_stuff()
|
||||||
api/tacticalrmm/agents/management/commands/fake_agents.py (new file, 745 lines)
@@ -0,0 +1,745 @@
import datetime as dt
import json
import random
import string

from accounts.models import User
from agents.models import Agent, AgentHistory
from automation.models import Policy
from autotasks.models import AutomatedTask, TaskResult
from checks.models import Check, CheckResult, CheckHistory
from clients.models import Client, Site
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.utils import timezone as djangotime
from logs.models import AuditLog, PendingAction
from scripts.models import Script
from software.models import InstalledSoftware
from winupdate.models import WinUpdate, WinUpdatePolicy

from tacticalrmm.demo_data import (
    disks,
    ping_fail_output,
    ping_success_output,
    spooler_stdout,
    temp_dir_stdout,
)

AGENTS_TO_GENERATE = 250

SVCS = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
WMI_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi1.json")
WMI_2 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi2.json")
WMI_3 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi3.json")
SW_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/software1.json")
SW_2 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/software2.json")
WIN_UPDATES = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winupdates.json")
EVT_LOG_FAIL = settings.BASE_DIR.joinpath(
    "tacticalrmm/test_data/eventlog_check_fail.json"
)


class Command(BaseCommand):
    help = "populate database with fake agents"

    def rand_string(self, length):
        chars = string.ascii_letters
        return "".join(random.choice(chars) for _ in range(length))

    def handle(self, *args, **kwargs):

        user = User.objects.first()
        if user:
            user.totp_key = "ABSA234234"
            user.save(update_fields=["totp_key"])

        Client.objects.all().delete()
        Agent.objects.all().delete()
        Check.objects.all().delete()
        Script.objects.all().delete()
        AutomatedTask.objects.all().delete()
        CheckHistory.objects.all().delete()
        Policy.objects.all().delete()
        AuditLog.objects.all().delete()
        PendingAction.objects.all().delete()

        call_command("load_community_scripts")

        # policies
        check_policy = Policy()
        check_policy.name = "Demo Checks Policy"
        check_policy.desc = "Demo Checks Policy"
        check_policy.active = True
        check_policy.enforced = True
        check_policy.save()

        patch_policy = Policy()
        patch_policy.name = "Demo Patch Policy"
        patch_policy.desc = "Demo Patch Policy"
        patch_policy.active = True
        patch_policy.enforced = True
        patch_policy.save()

        update_policy = WinUpdatePolicy()
        update_policy.policy = patch_policy
        update_policy.critical = "approve"
        update_policy.important = "approve"
        update_policy.moderate = "approve"
        update_policy.low = "ignore"
        update_policy.other = "ignore"
        update_policy.run_time_days = [6, 0, 2]
        update_policy.run_time_day = 1
        update_policy.reboot_after_install = "required"
        update_policy.reprocess_failed = True
        update_policy.email_if_fail = True
        update_policy.save()

        clients = [
            "Company 2",
            "Company 3",
            "Company 1",
            "Company 4",
            "Company 5",
            "Company 6",
        ]
        sites1 = ["HQ1", "LA Office 1", "NY Office 1"]
        sites2 = ["HQ2", "LA Office 2", "NY Office 2"]
        sites3 = ["HQ3", "LA Office 3", "NY Office 3"]
        sites4 = ["HQ4", "LA Office 4", "NY Office 4"]
        sites5 = ["HQ5", "LA Office 5", "NY Office 5"]
        sites6 = ["HQ6", "LA Office 6", "NY Office 6"]

        client1 = Client(name="Company 1")
        client2 = Client(name="Company 2")
        client3 = Client(name="Company 3")
        client4 = Client(name="Company 4")
        client5 = Client(name="Company 5")
        client6 = Client(name="Company 6")

        client1.save()
        client2.save()
        client3.save()
        client4.save()
        client5.save()
        client6.save()

        for site in sites1:
            Site(client=client1, name=site).save()

        for site in sites2:
            Site(client=client2, name=site).save()

        for site in sites3:
            Site(client=client3, name=site).save()

        for site in sites4:
            Site(client=client4, name=site).save()

        for site in sites5:
            Site(client=client5, name=site).save()

        for site in sites6:
            Site(client=client6, name=site).save()

        hostnames = [
            "DC-1",
            "DC-2",
            "FSV-1",
            "FSV-2",
            "WSUS",
            "DESKTOP-12345",
            "LAPTOP-55443",
        ]
        descriptions = ["Bob's computer", "Primary DC", "File Server", "Karen's Laptop"]
        modes = ["server", "workstation"]
        op_systems_servers = [
            "Microsoft Windows Server 2016 Standard, 64bit (build 14393)",
            "Microsoft Windows Server 2012 R2 Standard, 64bit (build 9600)",
            "Microsoft Windows Server 2019 Standard, 64bit (build 17763)",
        ]

        op_systems_workstations = [
            "Microsoft Windows 8.1 Pro, 64bit (build 9600)",
            "Microsoft Windows 10 Pro for Workstations, 64bit (build 18363)",
            "Microsoft Windows 10 Pro, 64bit (build 18363)",
        ]

        public_ips = ["65.234.22.4", "74.123.43.5", "44.21.134.45"]

        total_rams = [4, 8, 16, 32, 64, 128]

        now = dt.datetime.now()

        boot_times = []

        for _ in range(15):
            rand_hour = now - dt.timedelta(hours=random.randint(1, 22))
            boot_times.append(str(rand_hour.timestamp()))

        for _ in range(5):
            rand_days = now - dt.timedelta(days=random.randint(2, 50))
            boot_times.append(str(rand_days.timestamp()))

        user_names = ["None", "Karen", "Steve", "jsmith", "jdoe"]

        with open(SVCS) as f:
            services = json.load(f)

        # WMI
        with open(WMI_1) as f:
            wmi1 = json.load(f)

        with open(WMI_2) as f:
            wmi2 = json.load(f)

        with open(WMI_3) as f:
            wmi3 = json.load(f)

        wmi_details = []
        wmi_details.append(wmi1)
        wmi_details.append(wmi2)
        wmi_details.append(wmi3)

        # software
        with open(SW_1) as f:
            software1 = json.load(f)

        with open(SW_2) as f:
            software2 = json.load(f)

        softwares = []
        softwares.append(software1)
        softwares.append(software2)

        # windows updates
        with open(WIN_UPDATES) as f:
            windows_updates = json.load(f)["samplecomputer"]

        # event log check fail data
        with open(EVT_LOG_FAIL) as f:
            eventlog_check_fail_data = json.load(f)

        # create scripts

        clear_spool = Script()
        clear_spool.name = "Clear Print Spooler"
        clear_spool.description = "clears the print spooler. Fuck printers"
        clear_spool.filename = "clear_print_spool.bat"
        clear_spool.shell = "cmd"
        clear_spool.save()

        check_net_aware = Script()
        check_net_aware.name = "Check Network Location Awareness"
        check_net_aware.description = "Check's network location awareness on domain computers, should always be domain profile and not public or private. Sometimes happens when computer restarts before domain available. This script will return 0 if check passes or 1 if it fails."
        check_net_aware.filename = "check_network_loc_aware.ps1"
        check_net_aware.shell = "powershell"
        check_net_aware.save()

        check_pool_health = Script()
        check_pool_health.name = "Check storage spool health"
        check_pool_health.description = "loops through all storage pools and will fail if any of them are not healthy"
        check_pool_health.filename = "check_storage_pool_health.ps1"
        check_pool_health.shell = "powershell"
        check_pool_health.save()

        restart_nla = Script()
        restart_nla.name = "Restart NLA Service"
        restart_nla.description = "restarts the Network Location Awareness windows service to fix the nic profile. Run this after the check network service fails"
        restart_nla.filename = "restart_nla.ps1"
        restart_nla.shell = "powershell"
        restart_nla.save()

        show_tmp_dir_script = Script()
        show_tmp_dir_script.name = "Check temp dir"
        show_tmp_dir_script.description = "shows files in temp dir using python"
        show_tmp_dir_script.filename = "show_temp_dir.py"
        show_tmp_dir_script.shell = "python"
        show_tmp_dir_script.save()

        for count_agents in range(AGENTS_TO_GENERATE):

            client = random.choice(clients)

            if client == "Company 1":
                site = random.choice(sites1)
            elif client == "Company 2":
                site = random.choice(sites2)
            elif client == "Company 3":
                site = random.choice(sites3)
            elif client == "Company 4":
                site = random.choice(sites4)
            elif client == "Company 5":
                site = random.choice(sites5)
            elif client == "Company 6":
                site = random.choice(sites6)
            else:
                site = None

            agent = Agent()

            mode = random.choice(modes)
            if mode == "server":
                agent.operating_system = random.choice(op_systems_servers)
            else:
                agent.operating_system = random.choice(op_systems_workstations)

            agent.hostname = random.choice(hostnames)
            agent.version = settings.LATEST_AGENT_VER
            agent.site = Site.objects.get(name=site)
            agent.agent_id = self.rand_string(25)
            agent.description = random.choice(descriptions)
            agent.monitoring_type = mode
            agent.public_ip = random.choice(public_ips)
            agent.last_seen = djangotime.now()
            agent.plat = "windows"
            agent.plat_release = "windows-2019Server"
            agent.total_ram = random.choice(total_rams)
            agent.boot_time = random.choice(boot_times)
            agent.logged_in_username = random.choice(user_names)
            agent.mesh_node_id = (
                "3UiLhe420@kaVQ0rswzBeonW$WY0xrFFUDBQlcYdXoriLXzvPmBpMrV99vRHXFlb"
            )
            agent.overdue_email_alert = random.choice([True, False])
            agent.overdue_text_alert = random.choice([True, False])
            agent.needs_reboot = random.choice([True, False])
            agent.wmi_detail = random.choice(wmi_details)
            agent.services = services
            agent.disks = random.choice(disks)

            agent.save()

            InstalledSoftware(agent=agent, software=random.choice(softwares)).save()

            if mode == "workstation":
                WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
            else:
                WinUpdatePolicy(agent=agent).save()

            # windows updates load
            guids = []
            for k in windows_updates.keys():
                guids.append(k)

            for i in guids:
                WinUpdate(
                    agent=agent,
                    guid=i,
                    kb=windows_updates[i]["KBs"][0],
                    title=windows_updates[i]["Title"],
                    installed=windows_updates[i]["Installed"],
                    downloaded=windows_updates[i]["Downloaded"],
                    description=windows_updates[i]["Description"],
                    severity=windows_updates[i]["Severity"],
                ).save()

            # agent histories
            hist = AgentHistory()
            hist.agent = agent
            hist.type = "cmd_run"
            hist.command = "ping google.com"
            hist.username = "demo"
            hist.results = ping_success_output
            hist.save()

            hist1 = AgentHistory()
            hist1.agent = agent
            hist1.type = "script_run"
            hist1.script = clear_spool
            hist1.script_results = {
                "id": 1,
                "stderr": "",
                "stdout": spooler_stdout,
                "execution_time": 3.5554593,
                "retcode": 0,
            }
            hist1.save()

            # disk space check
            check1 = Check()
            check_result1 = CheckResult(assigned_check=check1, agent=agent)
            check1.agent = agent
            check1.check_type = "diskspace"
            check_result1.status = "passing"
            check_result1.last_run = djangotime.now()
            check_result1.more_info = "Total: 498.7GB, Free: 287.4GB"
            check_result1.save()

            check1.warning_threshold = 25
            check1.error_threshold = 10
            check1.disk = "C:"
            check1.email_alert = random.choice([True, False])
            check1.text_alert = random.choice([True, False])
            check1.save()

            for i in range(30):
                check1_history = CheckHistory()
                check1_history.check_id = check1.pk
                check1_history.agent_id = agent.agent_id
                check1_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                check1_history.y = random.randint(13, 40)
                check1_history.save()

            # ping check
            check2 = Check()
            check_result2 = CheckResult(assigned_check=check2, agent=agent)
            check2.agent = agent
            check2.check_type = "ping"
            check_result2.last_run = djangotime.now()
            check2.email_alert = random.choice([True, False])
            check2.text_alert = random.choice([True, False])

            if site in sites5:
                check2.name = "Synology NAS"
                check_result2.status = "failing"
                check2.ip = "172.17.14.26"
                check_result2.more_info = ping_fail_output
            else:
                check2.name = "Google"
                check_result2.status = "passing"
                check2.ip = "8.8.8.8"
                check_result2.more_info = ping_success_output

            check2.save()
            check_result2.save()

            for i in range(30):
                check2_history = CheckHistory()
                check2_history.check_id = check2.pk
                check2_history.agent_id = agent.agent_id
                check2_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                if site in sites5:
                    check2_history.y = 1
                    check2_history.results = ping_fail_output
                else:
                    check2_history.y = 0
                    check2_history.results = ping_success_output
                check2_history.save()

            # cpu load check
            check3 = Check()
            check_result3 = CheckResult(assigned_check=check3, agent=agent)
            check3.agent = agent
            check3.check_type = "cpuload"
            check_result3.status = "passing"
            check_result3.last_run = djangotime.now()
            check3.warning_threshold = 70
            check3.error_threshold = 90
            check_result3.history = [
                15,
                23,
                16,
                22,
                22,
                27,
                15,
                23,
                23,
                20,
                10,
                10,
                13,
                34,
            ]
            check3.email_alert = random.choice([True, False])
            check3.text_alert = random.choice([True, False])
            check3.save()
            check_result3.save()

            for i in range(30):
                check3_history = CheckHistory()
                check3_history.check_id = check3.pk
                check3_history.agent_id = agent.agent_id
                check3_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                check3_history.y = random.randint(2, 79)
                check3_history.save()

            # memory check
            check4 = Check()
            check_result4 = CheckResult(assigned_check=check4, agent=agent)
            check4.agent = agent
            check4.check_type = "memory"
            check_result4.status = "passing"
            check4.warning_threshold = 70
            check4.error_threshold = 85
            check_result4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34]
            check4.email_alert = random.choice([True, False])
            check4.text_alert = random.choice([True, False])
            check4.save()
            check_result4.save()

            for i in range(30):
                check4_history = CheckHistory()
                check4_history.check_id = check4.pk
                check4_history.agent_id = agent.agent_id
                check4_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                check4_history.y = random.randint(2, 79)
                check4_history.save()

            # script check storage pool
            check5 = Check()
            check_result5 = CheckResult(assigned_check=check5, agent=agent)
            check5.agent = agent
            check5.check_type = "script"
            check_result5.status = "passing"
            check_result5.last_run = djangotime.now()
            check5.email_alert = random.choice([True, False])
            check5.text_alert = random.choice([True, False])
            check5.timeout = 120
            check_result5.retcode = 0
            check_result5.execution_time = "4.0000"
            check5.script = check_pool_health
            check5.save()
            check_result5.save()

            for i in range(30):
                check5_history = CheckHistory()
                check5_history.check_id = check5.pk
                check5_history.agent_id = agent.agent_id
                check5_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                if i == 10 or i == 18:
                    check5_history.y = 1
                else:
                    check5_history.y = 0
                check5_history.save()

            check6 = Check()
            check_result6 = CheckResult(assigned_check=check6, agent=agent)
            check6.agent = agent
            check6.check_type = "script"
            check_result6.status = "passing"
            check_result6.last_run = djangotime.now()
            check6.email_alert = random.choice([True, False])
            check6.text_alert = random.choice([True, False])
            check6.timeout = 120
            check_result6.retcode = 0
            check_result6.execution_time = "4.0000"
            check6.script = check_net_aware
            check6.save()
            check_result6.save()

            for i in range(30):
                check6_history = CheckHistory()
                check6_history.check_id = check6.pk
                check6_history.agent_id = agent.agent_id
                check6_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                check6_history.y = 0
                check6_history.save()

            nla_task = AutomatedTask()
            nla_task_result = TaskResult(task=nla_task, agent=agent)
            nla_task.agent = agent
            actions = [
                {
                    "name": restart_nla.name,
                    "type": "script",
                    "script": restart_nla.pk,
                    "timeout": 90,
                    "script_args": [],
                }
            ]
            nla_task.actions = actions
            nla_task.assigned_check = check6
            nla_task.name = "Restart NLA"
            nla_task.task_type = "checkfailure"
            nla_task_result.execution_time = "1.8443"
            nla_task_result.last_run = djangotime.now()
            nla_task_result.stdout = "no stdout"
            nla_task_result.retcode = 0
            nla_task_result.sync_status = "synced"
            nla_task.save()
            nla_task_result.save()

            spool_task = AutomatedTask()
            spool_task_result = TaskResult(task=spool_task, agent=agent)
            spool_task.agent = agent
            actions = [
                {
                    "name": clear_spool.name,
                    "type": "script",
                    "script": clear_spool.pk,
                    "timeout": 90,
                    "script_args": [],
                }
            ]
            spool_task.actions = actions
            spool_task.name = "Clear the print spooler"
            spool_task.task_type = "daily"
            spool_task.run_time_date = djangotime.now() + djangotime.timedelta(
                minutes=10
            )
            spool_task.expire_date = djangotime.now() + djangotime.timedelta(days=753)
            spool_task.daily_interval = 1
            spool_task.weekly_interval = 1
            spool_task.task_repetition_duration = "2h"
            spool_task.task_repetition_interval = "25m"
            spool_task.random_task_delay = "3m"
            spool_task_result.last_run = djangotime.now()
            spool_task_result.retcode = 0
            spool_task_result.stdout = spooler_stdout
            spool_task_result.sync_status = "synced"
            spool_task.save()
            spool_task_result.save()

            tmp_dir_task = AutomatedTask()
            tmp_dir_task_result = TaskResult(task=tmp_dir_task, agent=agent)
            tmp_dir_task.agent = agent
            tmp_dir_task.name = "show temp dir files"
            actions = [
                {
                    "name": show_tmp_dir_script.name,
                    "type": "script",
                    "script": show_tmp_dir_script.pk,
                    "timeout": 90,
                    "script_args": [],
                }
            ]
            tmp_dir_task.actions = actions
            tmp_dir_task.task_type = "manual"
            tmp_dir_task_result.last_run = djangotime.now()
            tmp_dir_task_result.stdout = temp_dir_stdout
            tmp_dir_task_result.retcode = 0
            tmp_dir_task_result.sync_status = "synced"
            tmp_dir_task.save()
            tmp_dir_task_result.save()

            check7 = Check()
            check_result7 = CheckResult(assigned_check=check7, agent=agent)
            check7.agent = agent
            check7.check_type = "script"
            check_result7.status = "passing"
            check_result7.last_run = djangotime.now()
            check7.email_alert = random.choice([True, False])
            check7.text_alert = random.choice([True, False])
            check7.timeout = 120
            check_result7.retcode = 0
            check_result7.execution_time = "3.1337"
            check7.script = clear_spool
            check_result7.stdout = spooler_stdout
            check7.save()
            check_result7.save()

            for i in range(30):
                check7_history = CheckHistory()
                check7_history.check_id = check7.pk
                check7_history.agent_id = agent.agent_id
                check7_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                check7_history.y = 0
                check7_history.save()

            check8 = Check()
            check_result8 = CheckResult(assigned_check=check8, agent=agent)
            check8.agent = agent
            check8.check_type = "winsvc"
            check_result8.status = "passing"
            check_result8.last_run = djangotime.now()
            check8.email_alert = random.choice([True, False])
            check8.text_alert = random.choice([True, False])
            check_result8.more_info = "Status RUNNING"
            check8.fails_b4_alert = 4
            check8.svc_name = "Spooler"
            check8.svc_display_name = "Print Spooler"
            check8.pass_if_start_pending = False
            check8.restart_if_stopped = True
            check8.save()
            check_result8.save()

            for i in range(30):
                check8_history = CheckHistory()
                check8_history.check_id = check8.pk
                check8_history.agent_id = agent.agent_id
                check8_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                if i == 10 or i == 18:
                    check8_history.y = 1
                    check8_history.results = "Status STOPPED"
                else:
                    check8_history.y = 0
                    check8_history.results = "Status RUNNING"
                check8_history.save()

            check9 = Check()
            check_result9 = CheckResult(assigned_check=check9, agent=agent)
            check9.agent = agent
            check9.check_type = "eventlog"
            check9.name = "unexpected shutdown"

            check_result9.last_run = djangotime.now()
            check9.email_alert = random.choice([True, False])
            check9.text_alert = random.choice([True, False])
            check9.fails_b4_alert = 2

            if site in sites5:
                check_result9.extra_details = eventlog_check_fail_data
                check_result9.status = "failing"
            else:
                check_result9.extra_details = {"log": []}
                check_result9.status = "passing"

            check9.log_name = "Application"
            check9.event_id = 1001
            check9.event_type = "INFO"
            check9.fail_when = "contains"
            check9.search_last_days = 30

            check9.save()
            check_result9.save()

            for i in range(30):
                check9_history = CheckHistory()
                check9_history.check_id = check9.pk
                check9_history.agent_id = agent.agent_id
                check9_history.x = djangotime.now() - djangotime.timedelta(
                    minutes=i * 2
                )
                if i == 10 or i == 18:
                    check9_history.y = 1
                    check9_history.results = "Events Found: 16"
                else:
                    check9_history.y = 0
                    check9_history.results = "Events Found: 0"
                check9_history.save()

            pick = random.randint(1, 10)

            if pick == 5 or pick == 3:

                reboot_time = djangotime.now() + djangotime.timedelta(
                    minutes=random.randint(1000, 500000)
                )
                date_obj = dt.datetime.strftime(reboot_time, "%Y-%m-%d %H:%M")

                obj = dt.datetime.strptime(date_obj, "%Y-%m-%d %H:%M")

                task_name = "TacticalRMM_SchedReboot_" + "".join(
                    random.choice(string.ascii_letters) for _ in range(10)
                )

                sched_reboot = PendingAction()
                sched_reboot.agent = agent
                sched_reboot.action_type = "schedreboot"
                sched_reboot.details = {
                    "time": str(obj),
                    "taskname": task_name,
                }
                sched_reboot.save()

            self.stdout.write(self.style.SUCCESS(f"Added agent # {count_agents + 1}"))

        call_command("load_demo_scripts")
        self.stdout.write("done")
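Since demo_cron.py and fake_agents.py both live under agents/management/commands/, Django exposes them as management commands named after the module. A minimal seeding sketch, assuming the target database is disposable (handle() above starts by deleting all clients, agents, checks, scripts, tasks and policies):

# Sketch: seed a demo/dev database, then randomize last_seen the way the demo cron does.
from django.core.management import call_command

call_command("fake_agents")
call_command("demo_cron")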
@@ -1,16 +0,0 @@
from django.core.management.base import BaseCommand

from agents.models import Agent


class Command(BaseCommand):
    help = "Changes existing agents salt_id from a property to a model field"

    def handle(self, *args, **kwargs):
        agents = Agent.objects.filter(salt_id=None)
        for agent in agents:
            self.stdout.write(
                self.style.SUCCESS(f"Setting salt_id on {agent.hostname}")
            )
            agent.salt_id = f"{agent.hostname}-{agent.pk}"
            agent.save(update_fields=["salt_id"])
@@ -1,8 +1,7 @@
+from agents.models import Agent
 from django.conf import settings
 from django.core.management.base import BaseCommand
-
-from agents.models import Agent
 
 
 class Command(BaseCommand):
     help = "Shows online agents that are not on the latest version"
api/tacticalrmm/agents/management/commands/update_agents.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from agents.models import Agent
from agents.tasks import send_agent_update_task
from core.utils import get_core_settings
from django.conf import settings
from django.core.management.base import BaseCommand
from packaging import version as pyver

from tacticalrmm.constants import AGENT_DEFER


class Command(BaseCommand):
    help = "Triggers an agent update task to run"

    def handle(self, *args, **kwargs):
        core = get_core_settings()
        if not core.agent_auto_update:
            return

        q = Agent.objects.defer(*AGENT_DEFER).exclude(version=settings.LATEST_AGENT_VER)
        agent_ids: list[str] = [
            i.agent_id
            for i in q
            if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
        ]
        send_agent_update_task.delay(agent_ids=agent_ids)
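Assuming the standard command naming again, the update trigger can be run ad hoc or from a scheduler; it is a no-op unless agent_auto_update is enabled in core settings, per the handle() body above:

# Sketch: trigger the queued agent update task from code.
from django.core.management import call_command

call_command("update_agents")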
@@ -1,7 +1,7 @@
 # Generated by Django 3.2.1 on 2021-07-06 02:01
 
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
@@ -1,7 +1,7 @@
 # Generated by Django 3.2.5 on 2021-07-14 07:38
 
-from django.db import migrations, models
 import django.db.models.deletion
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
api/tacticalrmm/agents/migrations/0040_auto_20211010_0249.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Generated by Django 3.2.6 on 2021-10-10 02:49

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0039_auto_20210714_0738'),
    ]

    operations = [
        migrations.AlterField(
            model_name='agent',
            name='agent_id',
            field=models.CharField(max_length=200, unique=True),
        ),
        migrations.AlterField(
            model_name='agent',
            name='created_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='agent',
            name='modified_by',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.6 on 2021-10-18 03:04

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0040_auto_20211010_0249'),
    ]

    operations = [
        migrations.AlterField(
            model_name='agenthistory',
            name='username',
            field=models.CharField(default='system', max_length=255),
        ),
    ]
File diff suppressed because one or more lines are too long
api/tacticalrmm/agents/migrations/0043_auto_20220227_0554.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 3.2.12 on 2022-02-27 05:54

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0042_alter_agent_time_zone'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='agent',
            name='antivirus',
        ),
        migrations.RemoveField(
            model_name='agent',
            name='local_ip',
        ),
        migrations.RemoveField(
            model_name='agent',
            name='used_ram',
        ),
    ]
api/tacticalrmm/agents/migrations/0044_auto_20220227_0717.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# Generated by Django 3.2.12 on 2022-02-27 07:17

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0043_auto_20220227_0554'),
    ]

    operations = [
        migrations.RenameField(
            model_name='agent',
            old_name='salt_id',
            new_name='goarch',
        ),
        migrations.RemoveField(
            model_name='agent',
            name='salt_ver',
        ),
    ]
@@ -0,0 +1,16 @@
# Generated by Django 3.2.12 on 2022-03-12 02:30

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0044_auto_20220227_0717'),
    ]

    operations = [
        migrations.DeleteModel(
            name='RecoveryAction',
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.12 on 2022-03-17 17:15

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0045_delete_recoveryaction'),
    ]

    operations = [
        migrations.AlterField(
            model_name='agenthistory',
            name='command',
            field=models.TextField(blank=True, default='', null=True),
        ),
    ]
@@ -0,0 +1,26 @@
# Generated by Django 4.0.3 on 2022-04-07 17:28

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('clients', '0020_auto_20211226_0547'),
        ('agents', '0046_alter_agenthistory_command'),
    ]

    operations = [
        migrations.AlterField(
            model_name='agent',
            name='plat',
            field=models.CharField(default='windows', max_length=255),
        ),
        migrations.AlterField(
            model_name='agent',
            name='site',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.RESTRICT, related_name='agents', to='clients.site'),
            preserve_default=False,
        ),
    ]
@@ -0,0 +1,21 @@
# Generated by Django 4.0.3 on 2022-04-16 17:39

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0047_alter_agent_plat_alter_agent_site'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='agent',
            name='has_patches_pending',
        ),
        migrations.RemoveField(
            model_name='agent',
            name='pending_actions_count',
        ),
    ]
@@ -0,0 +1,17 @@
# Generated by Django 4.0.3 on 2022-04-18 14:29

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0048_remove_agent_has_patches_pending_and_more'),
    ]

    operations = [
        migrations.AddIndex(
            model_name='agent',
            index=models.Index(fields=['monitoring_type'], name='agents_agen_monitor_df8816_idx'),
        ),
    ]
File diff suppressed because it is too large
@@ -1,63 +1,123 @@
 from rest_framework import permissions
 
-from tacticalrmm.permissions import _has_perm
+from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
+
+
+class AgentPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+        if r.method == "GET":
+            if "agent_id" in view.kwargs.keys():
+                return _has_perm(r, "can_list_agents") and _has_perm_on_agent(
+                    r.user, view.kwargs["agent_id"]
+                )
+            else:
+                return _has_perm(r, "can_list_agents")
+        elif r.method == "DELETE":
+            return _has_perm(r, "can_uninstall_agents") and _has_perm_on_agent(
+                r.user, view.kwargs["agent_id"]
+            )
+        else:
+            if r.path == "/agents/maintenance/bulk/":
+                return _has_perm(r, "can_edit_agent")
+            else:
+                return _has_perm(r, "can_edit_agent") and _has_perm_on_agent(
+                    r.user, view.kwargs["agent_id"]
+                )
+
+
+class RecoverAgentPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_recover_agents") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
 class MeshPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_use_mesh")
-
-
-class UninstallPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_uninstall_agents")
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_use_mesh") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
 class UpdateAgentPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
+    def has_permission(self, r, view) -> bool:
         return _has_perm(r, "can_update_agents")
 
 
-class EditAgentPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_edit_agent")
+class PingAgentPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_ping_agents") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
 class ManageProcPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_manage_procs")
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_manage_procs") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
 class EvtLogPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_view_eventlogs")
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_view_eventlogs") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
 class SendCMDPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_send_cmd")
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_send_cmd") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
 class RebootAgentPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_reboot_agents")
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_reboot_agents") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
 class InstallAgentPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
+    def has_permission(self, r, view) -> bool:
         return _has_perm(r, "can_install_agents")
 
 
 class RunScriptPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_run_scripts")
+    def has_permission(self, r, view) -> bool:
+        return _has_perm(r, "can_run_scripts") and _has_perm_on_agent(
+            r.user, view.kwargs["agent_id"]
+        )
 
 
-class ManageNotesPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
-        return _has_perm(r, "can_manage_notes")
+class AgentNotesPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+
+        # permissions for GET /agents/notes/ endpoint
+        if r.method == "GET":
+
+            # permissions for /agents/<agent_id>/notes endpoint
+            if "agent_id" in view.kwargs.keys():
+                return _has_perm(r, "can_list_notes") and _has_perm_on_agent(
+                    r.user, view.kwargs["agent_id"]
+                )
+            else:
+                return _has_perm(r, "can_list_notes")
+        else:
+            return _has_perm(r, "can_manage_notes")
 
 
 class RunBulkPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
+    def has_permission(self, r, view) -> bool:
         return _has_perm(r, "can_run_bulk")
+
+
+class AgentHistoryPerms(permissions.BasePermission):
+    def has_permission(self, r, view) -> bool:
+        if "agent_id" in view.kwargs.keys():
+            return _has_perm(r, "can_list_agent_history") and _has_perm_on_agent(
+                r.user, view.kwargs["agent_id"]
+            )
+        else:
+            return _has_perm(r, "can_list_agent_history")
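A sketch of how these DRF permission classes are typically attached to a view; the view class and URL kwarg below are illustrative, not taken from this diff, but _has_perm_on_agent reads the agent_id kwarg from view.kwargs as shown above:

# Illustrative only: guard an agent-scoped endpoint with one of the classes from this diff.
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView


class RunScript(APIView):
    permission_classes = [IsAuthenticated, RunScriptPerms]

    def post(self, request, agent_id):
        ...  # dispatch the script to the agent identified by agent_id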
@@ -1,118 +1,8 @@
 import pytz
-from clients.serializers import ClientSerializer
 from rest_framework import serializers
-from tacticalrmm.utils import get_default_timezone
 from winupdate.serializers import WinUpdatePolicySerializer
 
-from .models import Agent, AgentCustomField, Note, AgentHistory
+from .models import Agent, AgentCustomField, AgentHistory, Note
 
 
-class AgentSerializer(serializers.ModelSerializer):
-    # for vue
-    winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
-    status = serializers.ReadOnlyField()
-    cpu_model = serializers.ReadOnlyField()
-    local_ips = serializers.ReadOnlyField()
-    make_model = serializers.ReadOnlyField()
-    physical_disks = serializers.ReadOnlyField()
-    graphics = serializers.ReadOnlyField()
-    checks = serializers.ReadOnlyField()
-    timezone = serializers.ReadOnlyField()
-    all_timezones = serializers.SerializerMethodField()
-    client_name = serializers.ReadOnlyField(source="client.name")
-    site_name = serializers.ReadOnlyField(source="site.name")
-
-    def get_all_timezones(self, obj):
-        return pytz.all_timezones
-
-    class Meta:
-        model = Agent
-        exclude = [
-            "last_seen",
-        ]
-
-
-class AgentOverdueActionSerializer(serializers.ModelSerializer):
-    class Meta:
-        model = Agent
-        fields = [
-            "pk",
-            "overdue_email_alert",
-            "overdue_text_alert",
-            "overdue_dashboard_alert",
-        ]
-
-
-class AgentTableSerializer(serializers.ModelSerializer):
-    status = serializers.ReadOnlyField()
-    checks = serializers.ReadOnlyField()
-    last_seen = serializers.SerializerMethodField()
-    client_name = serializers.ReadOnlyField(source="client.name")
-    site_name = serializers.ReadOnlyField(source="site.name")
-    logged_username = serializers.SerializerMethodField()
-    italic = serializers.SerializerMethodField()
-    policy = serializers.ReadOnlyField(source="policy.id")
-    alert_template = serializers.SerializerMethodField()
-
-    def get_alert_template(self, obj):
-
-        if not obj.alert_template:
-            return None
-        else:
-            return {
-                "name": obj.alert_template.name,
-                "always_email": obj.alert_template.agent_always_email,
-                "always_text": obj.alert_template.agent_always_text,
-                "always_alert": obj.alert_template.agent_always_alert,
-            }
-
-    def get_last_seen(self, obj) -> str:
-        if obj.time_zone is not None:
-            agent_tz = pytz.timezone(obj.time_zone)
-        else:
-            agent_tz = self.context["default_tz"]
-
-        return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M")
-
-    def get_logged_username(self, obj) -> str:
-        if obj.logged_in_username == "None" and obj.status == "online":
-            return obj.last_logged_in_user
-        elif obj.logged_in_username != "None":
-            return obj.logged_in_username
-        else:
-            return "-"
-
-    def get_italic(self, obj) -> bool:
-        return obj.logged_in_username == "None" and obj.status == "online"
-
-    class Meta:
-        model = Agent
-        fields = [
-            "id",
-            "alert_template",
-            "hostname",
-            "agent_id",
-            "site_name",
-            "client_name",
-            "monitoring_type",
-            "description",
-            "needs_reboot",
-            "has_patches_pending",
-            "pending_actions_count",
-            "status",
-            "overdue_text_alert",
-            "overdue_email_alert",
-            "overdue_dashboard_alert",
-            "last_seen",
-            "boot_time",
-            "checks",
-            "maintenance_mode",
-            "logged_username",
-            "italic",
-            "policy",
-            "block_policy_inheritance",
-        ]
-        depth = 2
-
-
 class AgentCustomFieldSerializer(serializers.ModelSerializer):
@@ -134,37 +24,123 @@ class AgentCustomFieldSerializer(serializers.ModelSerializer):
         }
 
 
-class AgentEditSerializer(serializers.ModelSerializer):
+class AgentSerializer(serializers.ModelSerializer):
     winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
+    status = serializers.ReadOnlyField()
+    cpu_model = serializers.ReadOnlyField()
+    local_ips = serializers.ReadOnlyField()
+    make_model = serializers.ReadOnlyField()
+    physical_disks = serializers.ReadOnlyField()
+    graphics = serializers.ReadOnlyField()
+    checks = serializers.ReadOnlyField()
+    timezone = serializers.ReadOnlyField()
     all_timezones = serializers.SerializerMethodField()
-    client = ClientSerializer(read_only=True)
+    client = serializers.ReadOnlyField(source="client.name")
+    site_name = serializers.ReadOnlyField(source="site.name")
     custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
+    patches_last_installed = serializers.ReadOnlyField()
+    last_seen = serializers.ReadOnlyField()
+    applied_policies = serializers.SerializerMethodField()
+    effective_patch_policy = serializers.SerializerMethodField()
+    alert_template = serializers.SerializerMethodField()
+
+    def get_alert_template(self, obj):
+        from alerts.serializers import AlertTemplateSerializer
+
+        return (
+            AlertTemplateSerializer(obj.alert_template).data
+            if obj.alert_template
+            else None
+        )
+
+    def get_effective_patch_policy(self, obj):
+        return WinUpdatePolicySerializer(obj.get_patch_policy()).data
+
+    def get_applied_policies(self, obj):
+        from automation.serializers import PolicySerializer
+
+        policies = obj.get_agent_policies()
+
+        # need to serialize model objects manually
+        for key, policy in policies.items():
+            if policy:
+                policies[key] = PolicySerializer(policy).data
+
+        return policies
 
     def get_all_timezones(self, obj):
         return pytz.all_timezones
 
+    class Meta:
+        model = Agent
+        exclude = ["id"]
+
+
+class AgentTableSerializer(serializers.ModelSerializer):
+    status = serializers.ReadOnlyField()
+    checks = serializers.ReadOnlyField()
+    client_name = serializers.ReadOnlyField(source="client.name")
+    site_name = serializers.ReadOnlyField(source="site.name")
+    logged_username = serializers.SerializerMethodField()
+    italic = serializers.SerializerMethodField()
+    policy = serializers.ReadOnlyField(source="policy.id")
+    alert_template = serializers.SerializerMethodField()
+    last_seen = serializers.ReadOnlyField()
+    pending_actions_count = serializers.ReadOnlyField()
+    has_patches_pending = serializers.ReadOnlyField()
+
+    def get_alert_template(self, obj):
+
+        if not obj.alert_template:
+            return None
+        else:
+            return {
+                "name": obj.alert_template.name,
+                "always_email": obj.alert_template.agent_always_email,
+                "always_text": obj.alert_template.agent_always_text,
+                "always_alert": obj.alert_template.agent_always_alert,
+            }
+
+    def get_logged_username(self, obj) -> str:
+        if obj.logged_in_username == "None" and obj.status == "online":
+            return obj.last_logged_in_user
+        elif obj.logged_in_username != "None":
+            return obj.logged_in_username
+        else:
+            return "-"
+
+    def get_italic(self, obj) -> bool:
+        return obj.logged_in_username == "None" and obj.status == "online"
+
     class Meta:
         model = Agent
         fields = [
-            "id",
+            "agent_id",
+            "alert_template",
             "hostname",
-            "client",
-            "site",
+            "site_name",
+            "client_name",
             "monitoring_type",
             "description",
-            "time_zone",
-            "timezone",
-            "check_interval",
-            "overdue_time",
-            "offline_time",
+            "needs_reboot",
+            "pending_actions_count",
+            "status",
             "overdue_text_alert",
             "overdue_email_alert",
             "overdue_dashboard_alert",
-            "all_timezones",
-            "winupdatepolicy",
+            "last_seen",
+            "boot_time",
+            "checks",
+            "maintenance_mode",
+            "logged_username",
+            "italic",
             "policy",
-            "custom_fields",
+            "block_policy_inheritance",
+            "plat",
+            "goarch",
+            "has_patches_pending",
         ]
+        depth = 2
 
 
 class WinAgentSerializer(serializers.ModelSerializer):
@@ -180,41 +156,31 @@ class AgentHostnameSerializer(serializers.ModelSerializer):
     class Meta:
         model = Agent
         fields = (
+            "id",
             "hostname",
-            "pk",
+            "agent_id",
             "client",
             "site",
         )
 
 
-class NoteSerializer(serializers.ModelSerializer):
+class AgentNoteSerializer(serializers.ModelSerializer):
     username = serializers.ReadOnlyField(source="user.username")
+    agent_id = serializers.ReadOnlyField(source="agent.agent_id")
 
     class Meta:
         model = Note
-        fields = "__all__"
-
-
-class NotesSerializer(serializers.ModelSerializer):
-    notes = NoteSerializer(many=True, read_only=True)
-
-    class Meta:
-        model = Agent
-        fields = ["hostname", "pk", "notes"]
+        fields = ("pk", "entry_time", "agent", "user", "note", "username", "agent_id")
+        extra_kwargs = {"agent": {"write_only": True}, "user": {"write_only": True}}
 
 
 class AgentHistorySerializer(serializers.ModelSerializer):
-    time = serializers.SerializerMethodField(read_only=True)
     script_name = serializers.ReadOnlyField(source="script.name")
 
     class Meta:
         model = AgentHistory
         fields = "__all__"
-
-    def get_time(self, history):
-        timezone = get_default_timezone()
-        return history.time.astimezone(timezone).strftime("%m %d %Y %H:%M:%S")
 
 
 class AgentAuditSerializer(serializers.ModelSerializer):
     class Meta:
@@ -1,27 +1,24 @@
 import asyncio
 import datetime as dt
 import random
-import urllib.parse
 from time import sleep
-from typing import Union
+from typing import Optional
 
-from alerts.models import Alert
-from core.models import CodeSignToken, CoreSettings
+from agents.models import Agent
+from agents.utils import get_agent_url
+from core.utils import get_core_settings
 from django.conf import settings
 from django.utils import timezone as djangotime
 from logs.models import DebugLog, PendingAction
 from packaging import version as pyver
 from scripts.models import Script
 
 from tacticalrmm.celery import app
-from tacticalrmm.utils import run_nats_api_cmd
-
-from agents.models import Agent
 
 
-def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str:
-    from agents.utils import get_exegen_url
-
-    agent = Agent.objects.get(pk=pk)
+def agent_update(agent_id: str, force: bool = False) -> str:
+    agent = Agent.objects.get(agent_id=agent_id)
 
     if pyver.parse(agent.version) <= pyver.parse("1.3.0"):
         return "not supported"
@@ -31,19 +28,13 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
         DebugLog.warning(
             agent=agent,
             log_type="agent_issues",
-            message=f"Unable to determine arch on {agent.hostname}({agent.pk}). Skipping agent update.",
+            message=f"Unable to determine arch on {agent.hostname}({agent.agent_id}). Skipping agent update.",
         )
         return "noarch"
 
     version = settings.LATEST_AGENT_VER
     inno = agent.win_inno_exe
-
-    if codesigntoken is not None and pyver.parse(version) >= pyver.parse("1.5.0"):
-        base_url = get_exegen_url() + "/api/v1/winagents/?"
-        params = {"version": version, "arch": agent.arch, "token": codesigntoken}
-        url = base_url + urllib.parse.urlencode(params)
-    else:
-        url = agent.winagent_dl
+    url = get_agent_url(agent.arch, agent.plat)
 
     if not force:
         if agent.pendingactions.filter(
@@ -76,69 +67,54 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
 
 
 @app.task
-def force_code_sign(pks: list[int]) -> None:
-    try:
-        token = CodeSignToken.objects.first().token  # type:ignore
-    except:
-        return
-
-    chunks = (pks[i : i + 50] for i in range(0, len(pks), 50))
+def force_code_sign(agent_ids: list[str]) -> None:
+    chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
     for chunk in chunks:
-        for pk in chunk:
-            agent_update(pk=pk, codesigntoken=token, force=True)
-            sleep(0.05)
-        sleep(4)
+        for agent_id in chunk:
+            agent_update(agent_id=agent_id, force=True)
+            sleep(2)
 
 
 @app.task
-def send_agent_update_task(pks: list[int]) -> None:
-    try:
+def send_agent_update_task(agent_ids: list[str]) -> None:
+    chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
|
||||||
codesigntoken = CodeSignToken.objects.first().token # type:ignore
|
|
||||||
except:
|
|
||||||
codesigntoken = None
|
|
||||||
|
|
||||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
|
||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
for pk in chunk:
|
for agent_id in chunk:
|
||||||
agent_update(pk, codesigntoken)
|
agent_update(agent_id)
|
||||||
sleep(0.05)
|
sleep(2)
|
||||||
sleep(4)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def auto_self_agent_update_task() -> None:
|
def auto_self_agent_update_task() -> None:
|
||||||
core = CoreSettings.objects.first()
|
core = get_core_settings()
|
||||||
if not core.agent_auto_update: # type:ignore
|
if not core.agent_auto_update:
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
q = Agent.objects.only("agent_id", "version")
|
||||||
codesigntoken = CodeSignToken.objects.first().token # type:ignore
|
agent_ids: list[str] = [
|
||||||
except:
|
i.agent_id
|
||||||
codesigntoken = None
|
|
||||||
|
|
||||||
q = Agent.objects.only("pk", "version")
|
|
||||||
pks: list[int] = [
|
|
||||||
i.pk
|
|
||||||
for i in q
|
for i in q
|
||||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||||
]
|
]
|
||||||
|
|
||||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
chunks = (agent_ids[i : i + 30] for i in range(0, len(agent_ids), 30))
|
||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
for pk in chunk:
|
for agent_id in chunk:
|
||||||
agent_update(pk, codesigntoken)
|
agent_update(agent_id)
|
||||||
sleep(0.05)
|
sleep(2)
|
||||||
sleep(4)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str:
|
def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) -> str:
|
||||||
from alerts.models import Alert
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
try:
|
||||||
alert = Alert.objects.get(pk=pk)
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
except Alert.DoesNotExist:
|
||||||
|
return "alert not found"
|
||||||
|
|
||||||
if not alert.email_sent:
|
if not alert.email_sent:
|
||||||
sleep(random.randint(1, 15))
|
sleep(random.randint(1, 5))
|
||||||
alert.agent.send_outage_email()
|
alert.agent.send_outage_email()
|
||||||
alert.email_sent = djangotime.now()
|
alert.email_sent = djangotime.now()
|
||||||
alert.save(update_fields=["email_sent"])
|
alert.save(update_fields=["email_sent"])
|
||||||
@@ -147,7 +123,7 @@ def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None)
|
|||||||
# send an email only if the last email sent is older than alert interval
|
# send an email only if the last email sent is older than alert interval
|
||||||
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||||
if alert.email_sent < delta:
|
if alert.email_sent < delta:
|
||||||
sleep(random.randint(1, 10))
|
sleep(random.randint(1, 5))
|
||||||
alert.agent.send_outage_email()
|
alert.agent.send_outage_email()
|
||||||
alert.email_sent = djangotime.now()
|
alert.email_sent = djangotime.now()
|
||||||
alert.save(update_fields=["email_sent"])
|
alert.save(update_fields=["email_sent"])
|
||||||
@@ -159,8 +135,13 @@ def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None)
|
|||||||
def agent_recovery_email_task(pk: int) -> str:
|
def agent_recovery_email_task(pk: int) -> str:
|
||||||
from alerts.models import Alert
|
from alerts.models import Alert
|
||||||
|
|
||||||
sleep(random.randint(1, 15))
|
sleep(random.randint(1, 5))
|
||||||
|
|
||||||
|
try:
|
||||||
alert = Alert.objects.get(pk=pk)
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
except Alert.DoesNotExist:
|
||||||
|
return "alert not found"
|
||||||
|
|
||||||
alert.agent.send_recovery_email()
|
alert.agent.send_recovery_email()
|
||||||
alert.resolved_email_sent = djangotime.now()
|
alert.resolved_email_sent = djangotime.now()
|
||||||
alert.save(update_fields=["resolved_email_sent"])
|
alert.save(update_fields=["resolved_email_sent"])
|
||||||
@@ -169,13 +150,16 @@ def agent_recovery_email_task(pk: int) -> str:
|
|||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str:
|
def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> str:
|
||||||
from alerts.models import Alert
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
try:
|
||||||
alert = Alert.objects.get(pk=pk)
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
except Alert.DoesNotExist:
|
||||||
|
return "alert not found"
|
||||||
|
|
||||||
if not alert.sms_sent:
|
if not alert.sms_sent:
|
||||||
sleep(random.randint(1, 15))
|
sleep(random.randint(1, 3))
|
||||||
alert.agent.send_outage_sms()
|
alert.agent.send_outage_sms()
|
||||||
alert.sms_sent = djangotime.now()
|
alert.sms_sent = djangotime.now()
|
||||||
alert.save(update_fields=["sms_sent"])
|
alert.save(update_fields=["sms_sent"])
|
||||||
@@ -184,7 +168,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) ->
|
|||||||
# send an sms only if the last sms sent is older than alert interval
|
# send an sms only if the last sms sent is older than alert interval
|
||||||
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||||
if alert.sms_sent < delta:
|
if alert.sms_sent < delta:
|
||||||
sleep(random.randint(1, 10))
|
sleep(random.randint(1, 3))
|
||||||
alert.agent.send_outage_sms()
|
alert.agent.send_outage_sms()
|
||||||
alert.sms_sent = djangotime.now()
|
alert.sms_sent = djangotime.now()
|
||||||
alert.save(update_fields=["sms_sent"])
|
alert.save(update_fields=["sms_sent"])
|
||||||
@@ -197,7 +181,11 @@ def agent_recovery_sms_task(pk: int) -> str:
|
|||||||
from alerts.models import Alert
|
from alerts.models import Alert
|
||||||
|
|
||||||
sleep(random.randint(1, 3))
|
sleep(random.randint(1, 3))
|
||||||
|
try:
|
||||||
alert = Alert.objects.get(pk=pk)
|
alert = Alert.objects.get(pk=pk)
|
||||||
|
except Alert.DoesNotExist:
|
||||||
|
return "alert not found"
|
||||||
|
|
||||||
alert.agent.send_recovery_sms()
|
alert.agent.send_recovery_sms()
|
||||||
alert.resolved_sms_sent = djangotime.now()
|
alert.resolved_sms_sent = djangotime.now()
|
||||||
alert.save(update_fields=["resolved_sms_sent"])
|
alert.save(update_fields=["resolved_sms_sent"])
|
||||||
@@ -252,7 +240,7 @@ def run_script_email_results_task(
|
|||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
CORE = CoreSettings.objects.first()
|
CORE = get_core_settings()
|
||||||
subject = f"{agent.hostname} {script.name} Results"
|
subject = f"{agent.hostname} {script.name} Results"
|
||||||
exec_time = "{:.4f}".format(r["execution_time"])
|
exec_time = "{:.4f}".format(r["execution_time"])
|
||||||
body = (
|
body = (
|
||||||
@@ -265,48 +253,48 @@ def run_script_email_results_task(
|
|||||||
|
|
||||||
msg = EmailMessage()
|
msg = EmailMessage()
|
||||||
msg["Subject"] = subject
|
msg["Subject"] = subject
|
||||||
msg["From"] = CORE.smtp_from_email # type:ignore
|
msg["From"] = CORE.smtp_from_email
|
||||||
|
|
||||||
if emails:
|
if emails:
|
||||||
msg["To"] = ", ".join(emails)
|
msg["To"] = ", ".join(emails)
|
||||||
else:
|
else:
|
||||||
msg["To"] = ", ".join(CORE.email_alert_recipients) # type:ignore
|
msg["To"] = ", ".join(CORE.email_alert_recipients)
|
||||||
|
|
||||||
msg.set_content(body)
|
msg.set_content(body)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with smtplib.SMTP(
|
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
|
||||||
CORE.smtp_host, CORE.smtp_port, timeout=20 # type:ignore
|
if CORE.smtp_requires_auth:
|
||||||
) as server: # type:ignore
|
|
||||||
if CORE.smtp_requires_auth: # type:ignore
|
|
||||||
server.ehlo()
|
server.ehlo()
|
||||||
server.starttls()
|
server.starttls()
|
||||||
server.login(
|
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
|
||||||
CORE.smtp_host_user, CORE.smtp_host_password # type:ignore
|
|
||||||
) # type:ignore
|
|
||||||
server.send_message(msg)
|
server.send_message(msg)
|
||||||
server.quit()
|
server.quit()
|
||||||
else:
|
else:
|
||||||
server.send_message(msg)
|
server.send_message(msg)
|
||||||
server.quit()
|
server.quit()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
DebugLog.error(message=e)
|
DebugLog.error(message=str(e))
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def clear_faults_task(older_than_days: int) -> None:
|
def clear_faults_task(older_than_days: int) -> None:
|
||||||
# https://github.com/wh1te909/tacticalrmm/issues/484
|
from alerts.models import Alert
|
||||||
|
|
||||||
|
# https://github.com/amidaware/tacticalrmm/issues/484
|
||||||
agents = Agent.objects.exclude(last_seen__isnull=True).filter(
|
agents = Agent.objects.exclude(last_seen__isnull=True).filter(
|
||||||
last_seen__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
last_seen__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
|
||||||
)
|
)
|
||||||
for agent in agents:
|
for agent in agents:
|
||||||
if agent.agentchecks.exists():
|
for check in agent.get_checks_with_policies():
|
||||||
for check in agent.agentchecks.all():
|
|
||||||
# reset check status
|
# reset check status
|
||||||
check.status = "passing"
|
if check.check_result:
|
||||||
check.save(update_fields=["status"])
|
check.check_result.status = "passing"
|
||||||
if check.alert.filter(resolved=False).exists():
|
check.check_result.save(update_fields=["status"])
|
||||||
check.alert.get(resolved=False).resolve()
|
if check.alert.filter(agent=agent, resolved=False).exists():
|
||||||
|
alert = Alert.create_or_return_check_alert(check, agent=agent)
|
||||||
|
if alert:
|
||||||
|
alert.resolve()
|
||||||
|
|
||||||
# reset overdue alerts
|
# reset overdue alerts
|
||||||
agent.overdue_email_alert = False
|
agent.overdue_email_alert = False
|
||||||
@@ -321,25 +309,6 @@ def clear_faults_task(older_than_days: int) -> None:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def get_wmi_task() -> None:
|
|
||||||
agents = Agent.objects.only(
|
|
||||||
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
|
|
||||||
)
|
|
||||||
ids = [i.agent_id for i in agents if i.status == "online"]
|
|
||||||
run_nats_api_cmd("wmi", ids, timeout=45)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def agent_checkin_task() -> None:
|
|
||||||
run_nats_api_cmd("checkin", timeout=30)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def agent_getinfo_task() -> None:
|
|
||||||
run_nats_api_cmd("agentinfo", timeout=30)
|
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
@app.task
|
||||||
def prune_agent_history(older_than_days: int) -> str:
|
def prune_agent_history(older_than_days: int) -> str:
|
||||||
from .models import AgentHistory
|
from .models import AgentHistory
|
||||||
@@ -349,45 +318,3 @@ def prune_agent_history(older_than_days: int) -> str:
|
|||||||
).delete()
|
).delete()
|
||||||
|
|
||||||
return "ok"
|
return "ok"
|
||||||
|
|
||||||
|
|
||||||
@app.task
|
|
||||||
def handle_agents_task() -> None:
|
|
||||||
q = Agent.objects.prefetch_related("pendingactions", "autotasks").only(
|
|
||||||
"pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
|
|
||||||
)
|
|
||||||
agents = [
|
|
||||||
i
|
|
||||||
for i in q
|
|
||||||
if pyver.parse(i.version) >= pyver.parse("1.6.0") and i.status == "online"
|
|
||||||
]
|
|
||||||
for agent in agents:
|
|
||||||
# change agent update pending status to completed if agent has just updated
|
|
||||||
if (
|
|
||||||
pyver.parse(agent.version) == pyver.parse(settings.LATEST_AGENT_VER)
|
|
||||||
and agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).exists()
|
|
||||||
):
|
|
||||||
agent.pendingactions.filter(
|
|
||||||
action_type="agentupdate", status="pending"
|
|
||||||
).update(status="completed")
|
|
||||||
|
|
||||||
# sync scheduled tasks
|
|
||||||
if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
|
|
||||||
tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore
|
|
||||||
|
|
||||||
for task in tasks:
|
|
||||||
if task.sync_status == "pendingdeletion":
|
|
||||||
task.delete_task_on_agent()
|
|
||||||
elif task.sync_status == "initial":
|
|
||||||
task.modify_task_on_agent()
|
|
||||||
elif task.sync_status == "notsynced":
|
|
||||||
task.create_task_on_agent()
|
|
||||||
|
|
||||||
# handles any alerting actions
|
|
||||||
if Alert.objects.filter(agent=agent, resolved=False).exists():
|
|
||||||
try:
|
|
||||||
Alert.handle_alert_resolve(agent)
|
|
||||||
except:
|
|
||||||
continue
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,33 +1,43 @@
|
|||||||
|
from autotasks.views import GetAddAutoTasks
|
||||||
|
from checks.views import GetAddChecks
|
||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
from logs.views import PendingActions
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("listagents/", views.AgentsTableList.as_view()),
|
# agent views
|
||||||
path("listagentsnodetail/", views.list_agents_no_detail),
|
path("", views.GetAgents.as_view()),
|
||||||
path("<int:pk>/agenteditdetails/", views.agent_edit_details),
|
path("<agent:agent_id>/", views.GetUpdateDeleteAgent.as_view()),
|
||||||
path("overdueaction/", views.overdue_action),
|
path("<agent:agent_id>/cmd/", views.send_raw_cmd),
|
||||||
path("sendrawcmd/", views.send_raw_cmd),
|
path("<agent:agent_id>/runscript/", views.run_script),
|
||||||
path("<pk>/agentdetail/", views.agent_detail),
|
path("<agent:agent_id>/wmi/", views.WMI.as_view()),
|
||||||
path("<int:pk>/meshcentral/", views.meshcentral),
|
path("<agent:agent_id>/recover/", views.recover),
|
||||||
path("<str:arch>/getmeshexe/", views.get_mesh_exe),
|
path("<agent:agent_id>/reboot/", views.Reboot.as_view()),
|
||||||
path("uninstall/", views.uninstall),
|
path("<agent:agent_id>/ping/", views.ping),
|
||||||
path("editagent/", views.edit_agent),
|
# alias for checks get view
|
||||||
path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
|
path("<agent:agent_id>/checks/", GetAddChecks.as_view()),
|
||||||
path("getagentversions/", views.get_agent_versions),
|
# alias for autotasks get view
|
||||||
path("updateagents/", views.update_agents),
|
path("<agent:agent_id>/tasks/", GetAddAutoTasks.as_view()),
|
||||||
path("<pk>/getprocs/", views.get_processes),
|
# alias for pending actions get view
|
||||||
path("<pk>/<pid>/killproc/", views.kill_proc),
|
path("<agent:agent_id>/pendingactions/", PendingActions.as_view()),
|
||||||
path("reboot/", views.Reboot.as_view()),
|
# agent remote background
|
||||||
path("installagent/", views.install_agent),
|
path("<agent:agent_id>/meshcentral/", views.AgentMeshCentral.as_view()),
|
||||||
path("<int:pk>/ping/", views.ping),
|
path("<agent:agent_id>/meshcentral/recover/", views.AgentMeshCentral.as_view()),
|
||||||
path("recover/", views.recover),
|
path("<agent:agent_id>/processes/", views.AgentProcesses.as_view()),
|
||||||
path("runscript/", views.run_script),
|
path("<agent:agent_id>/processes/<int:pid>/", views.AgentProcesses.as_view()),
|
||||||
path("<int:pk>/recovermesh/", views.recover_mesh),
|
path("<agent:agent_id>/eventlog/<str:logtype>/<int:days>/", views.get_event_log),
|
||||||
path("<int:pk>/notes/", views.GetAddNotes.as_view()),
|
# agent history
|
||||||
path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
|
path("history/", views.AgentHistoryView.as_view()),
|
||||||
path("bulk/", views.bulk),
|
path("<agent:agent_id>/history/", views.AgentHistoryView.as_view()),
|
||||||
path("maintenance/", views.agent_maintenance),
|
# agent notes
|
||||||
path("<int:pk>/wmi/", views.WMI.as_view()),
|
path("notes/", views.GetAddNotes.as_view()),
|
||||||
path("history/<int:pk>/", views.AgentHistoryView.as_view()),
|
path("notes/<int:pk>/", views.GetEditDeleteNote.as_view()),
|
||||||
|
path("<agent:agent_id>/notes/", views.GetAddNotes.as_view()),
|
||||||
|
# bulk actions
|
||||||
|
path("maintenance/bulk/", views.agent_maintenance),
|
||||||
|
path("actions/bulk/", views.bulk),
|
||||||
|
path("versions/", views.get_agent_versions),
|
||||||
|
path("update/", views.update_agents),
|
||||||
|
path("installer/", views.install_agent),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,37 +1,92 @@
|
|||||||
import random
|
import asyncio
|
||||||
|
import tempfile
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
|
||||||
import requests
|
from core.models import CodeSignToken
|
||||||
|
from core.utils import get_mesh_device_id, get_mesh_ws_url, get_core_settings
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.http import FileResponse
|
||||||
|
|
||||||
|
from tacticalrmm.constants import MeshAgentIdent
|
||||||
|
|
||||||
|
|
||||||
def get_exegen_url() -> str:
|
def get_agent_url(arch: str, plat: str) -> str:
|
||||||
urls: list[str] = settings.EXE_GEN_URLS
|
|
||||||
for url in urls:
|
|
||||||
try:
|
|
||||||
r = requests.get(url, timeout=10)
|
|
||||||
except:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if r.status_code == 200:
|
if plat == "windows":
|
||||||
return url
|
endpoint = "winagents"
|
||||||
|
dl_url = settings.DL_32 if arch == "32" else settings.DL_64
|
||||||
|
else:
|
||||||
|
endpoint = "linuxagents"
|
||||||
|
dl_url = ""
|
||||||
|
|
||||||
return random.choice(urls)
|
token = CodeSignToken.objects.first()
|
||||||
|
if not token:
|
||||||
|
return dl_url
|
||||||
|
|
||||||
|
if token.is_valid:
|
||||||
def get_winagent_url(arch: str) -> str:
|
base_url = settings.EXE_GEN_URL + f"/api/v1/{endpoint}/?"
|
||||||
from core.models import CodeSignToken
|
|
||||||
|
|
||||||
try:
|
|
||||||
codetoken = CodeSignToken.objects.first().token
|
|
||||||
base_url = get_exegen_url() + "/api/v1/winagents/?"
|
|
||||||
params = {
|
params = {
|
||||||
"version": settings.LATEST_AGENT_VER,
|
"version": settings.LATEST_AGENT_VER,
|
||||||
"arch": arch,
|
"arch": arch,
|
||||||
"token": codetoken,
|
"token": token.token,
|
||||||
}
|
}
|
||||||
dl_url = base_url + urllib.parse.urlencode(params)
|
dl_url = base_url + urllib.parse.urlencode(params)
|
||||||
except:
|
|
||||||
dl_url = settings.DL_64 if arch == "64" else settings.DL_32
|
|
||||||
|
|
||||||
return dl_url
|
return dl_url
|
||||||
|
|
||||||
|
|
||||||
|
def generate_linux_install(
|
||||||
|
client: str,
|
||||||
|
site: str,
|
||||||
|
agent_type: str,
|
||||||
|
arch: str,
|
||||||
|
token: str,
|
||||||
|
api: str,
|
||||||
|
download_url: str,
|
||||||
|
) -> FileResponse:
|
||||||
|
|
||||||
|
match arch:
|
||||||
|
case "amd64":
|
||||||
|
arch_id = MeshAgentIdent.LINUX64
|
||||||
|
case "386":
|
||||||
|
arch_id = MeshAgentIdent.LINUX32
|
||||||
|
case "arm64":
|
||||||
|
arch_id = MeshAgentIdent.LINUX_ARM_64
|
||||||
|
case "arm":
|
||||||
|
arch_id = MeshAgentIdent.LINUX_ARM_HF
|
||||||
|
case _:
|
||||||
|
arch_id = "not_found"
|
||||||
|
|
||||||
|
core = get_core_settings()
|
||||||
|
|
||||||
|
uri = get_mesh_ws_url()
|
||||||
|
mesh_id = asyncio.run(get_mesh_device_id(uri, core.mesh_device_group))
|
||||||
|
mesh_dl = (
|
||||||
|
f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=0&meshinstall={arch_id}"
|
||||||
|
)
|
||||||
|
|
||||||
|
sh = settings.LINUX_AGENT_SCRIPT
|
||||||
|
with open(sh, "r") as f:
|
||||||
|
text = f.read()
|
||||||
|
|
||||||
|
replace = {
|
||||||
|
"agentDLChange": download_url,
|
||||||
|
"meshDLChange": mesh_dl,
|
||||||
|
"clientIDChange": client,
|
||||||
|
"siteIDChange": site,
|
||||||
|
"agentTypeChange": agent_type,
|
||||||
|
"tokenChange": token,
|
||||||
|
"apiURLChange": api,
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, j in replace.items():
|
||||||
|
text = text.replace(i, j)
|
||||||
|
|
||||||
|
with tempfile.NamedTemporaryFile() as fp:
|
||||||
|
with open(fp.name, "w") as f:
|
||||||
|
f.write(text)
|
||||||
|
f.write("\n")
|
||||||
|
|
||||||
|
return FileResponse(
|
||||||
|
open(fp.name, "rb"), as_attachment=True, filename="linux_agent_install.sh"
|
||||||
|
)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
23
api/tacticalrmm/alerts/migrations/0010_auto_20210917_1954.py
Normal file
23
api/tacticalrmm/alerts/migrations/0010_auto_20210917_1954.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 3.2.6 on 2021-09-17 19:54
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("alerts", "0009_auto_20210721_1810"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="alerttemplate",
|
||||||
|
name="created_by",
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="alerttemplate",
|
||||||
|
name="modified_by",
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
24
api/tacticalrmm/alerts/migrations/0011_alter_alert_agent.py
Normal file
24
api/tacticalrmm/alerts/migrations/0011_alter_alert_agent.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# Generated by Django 4.0.3 on 2022-04-07 17:28
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
def delete_alerts_without_agent(apps, schema):
|
||||||
|
Alert = apps.get_model("alerts", "Alert")
|
||||||
|
|
||||||
|
Alert.objects.filter(agent=None).delete()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("agents", "0047_alter_agent_plat_alter_agent_site"),
|
||||||
|
("alerts", "0010_auto_20210917_1954"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(
|
||||||
|
delete_alerts_without_agent, reverse_code=migrations.RunPython.noop
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -1,19 +1,21 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import re
|
import re
|
||||||
from typing import TYPE_CHECKING, Union
|
from typing import TYPE_CHECKING, Union, Optional, Dict, Any, List, cast
|
||||||
|
|
||||||
from django.contrib.postgres.fields import ArrayField
|
from django.contrib.postgres.fields import ArrayField
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||||
from django.utils import timezone as djangotime
|
from django.utils import timezone as djangotime
|
||||||
|
|
||||||
from logs.models import BaseAuditModel, DebugLog
|
from logs.models import BaseAuditModel, DebugLog
|
||||||
|
|
||||||
|
from tacticalrmm.models import PermissionQuerySet
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from autotasks.models import AutomatedTask
|
from autotasks.models import AutomatedTask, TaskResult
|
||||||
from checks.models import Check
|
from checks.models import Check, CheckResult
|
||||||
|
from clients.models import Client, Site
|
||||||
|
|
||||||
|
|
||||||
SEVERITY_CHOICES = [
|
SEVERITY_CHOICES = [
|
||||||
@@ -31,6 +33,8 @@ ALERT_TYPE_CHOICES = [
|
|||||||
|
|
||||||
|
|
||||||
class Alert(models.Model):
|
class Alert(models.Model):
|
||||||
|
objects = PermissionQuerySet.as_manager()
|
||||||
|
|
||||||
agent = models.ForeignKey(
|
agent = models.ForeignKey(
|
||||||
"agents.Agent",
|
"agents.Agent",
|
||||||
related_name="agent",
|
related_name="agent",
|
||||||
@@ -80,62 +84,193 @@ class Alert(models.Model):
|
|||||||
max_length=100, null=True, blank=True
|
max_length=100, null=True, blank=True
|
||||||
)
|
)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self) -> str:
|
||||||
return self.message
|
return self.message
|
||||||
|
|
||||||
def resolve(self):
|
@property
|
||||||
|
def assigned_agent(self) -> "Agent":
|
||||||
|
return self.agent
|
||||||
|
|
||||||
|
@property
|
||||||
|
def site(self) -> "Site":
|
||||||
|
return self.agent.site
|
||||||
|
|
||||||
|
@property
|
||||||
|
def client(self) -> "Client":
|
||||||
|
return self.agent.client
|
||||||
|
|
||||||
|
def resolve(self) -> None:
|
||||||
self.resolved = True
|
self.resolved = True
|
||||||
self.resolved_on = djangotime.now()
|
self.resolved_on = djangotime.now()
|
||||||
self.snoozed = False
|
self.snoozed = False
|
||||||
self.snooze_until = None
|
self.snooze_until = None
|
||||||
self.save()
|
self.save(update_fields=["resolved", "resolved_on", "snoozed", "snooze_until"])
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_or_return_availability_alert(cls, agent):
|
def create_or_return_availability_alert(
|
||||||
if not cls.objects.filter(agent=agent, resolved=False).exists():
|
cls, agent: Agent, skip_create: bool = False
|
||||||
return cls.objects.create(
|
) -> Optional[Alert]:
|
||||||
|
if not cls.objects.filter(
|
||||||
|
agent=agent, alert_type="availability", resolved=False
|
||||||
|
).exists():
|
||||||
|
if skip_create:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return cast(
|
||||||
|
Alert,
|
||||||
|
cls.objects.create(
|
||||||
agent=agent,
|
agent=agent,
|
||||||
alert_type="availability",
|
alert_type="availability",
|
||||||
severity="error",
|
severity="error",
|
||||||
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
||||||
hidden=True,
|
hidden=True,
|
||||||
|
),
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
return cls.objects.get(agent=agent, resolved=False)
|
try:
|
||||||
|
return cast(
|
||||||
|
Alert,
|
||||||
|
cls.objects.get(
|
||||||
|
agent=agent, alert_type="availability", resolved=False
|
||||||
|
),
|
||||||
|
)
|
||||||
|
except cls.MultipleObjectsReturned:
|
||||||
|
alerts = cls.objects.filter(
|
||||||
|
agent=agent, alert_type="availability", resolved=False
|
||||||
|
)
|
||||||
|
|
||||||
|
last_alert = cast(Alert, alerts.last())
|
||||||
|
|
||||||
|
# cycle through other alerts and resolve
|
||||||
|
for alert in alerts:
|
||||||
|
if alert.id != last_alert.pk:
|
||||||
|
alert.resolve()
|
||||||
|
|
||||||
|
return last_alert
|
||||||
|
except cls.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_or_return_check_alert(cls, check):
|
def create_or_return_check_alert(
|
||||||
|
cls,
|
||||||
|
check: "Check",
|
||||||
|
agent: "Agent",
|
||||||
|
alert_severity: Optional[str] = None,
|
||||||
|
skip_create: bool = False,
|
||||||
|
) -> "Optional[Alert]":
|
||||||
|
|
||||||
if not cls.objects.filter(assigned_check=check, resolved=False).exists():
|
# need to pass agent if the check is a policy
|
||||||
return cls.objects.create(
|
if not cls.objects.filter(
|
||||||
assigned_check=check,
|
assigned_check=check,
|
||||||
|
agent=agent,
|
||||||
|
resolved=False,
|
||||||
|
).exists():
|
||||||
|
if skip_create:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return cast(
|
||||||
|
Alert,
|
||||||
|
cls.objects.create(
|
||||||
|
assigned_check=check,
|
||||||
|
agent=agent,
|
||||||
alert_type="check",
|
alert_type="check",
|
||||||
severity=check.alert_severity,
|
severity=check.alert_severity
|
||||||
message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
if check.check_type
|
||||||
|
not in ["memory", "cpuload", "diskspace", "script"]
|
||||||
|
else alert_severity,
|
||||||
|
message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||||
hidden=True,
|
hidden=True,
|
||||||
|
),
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
return cls.objects.get(assigned_check=check, resolved=False)
|
try:
|
||||||
|
return cast(
|
||||||
|
Alert,
|
||||||
|
cls.objects.get(
|
||||||
|
assigned_check=check,
|
||||||
|
agent=agent,
|
||||||
|
resolved=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
except cls.MultipleObjectsReturned:
|
||||||
|
alerts = cls.objects.filter(
|
||||||
|
assigned_check=check,
|
||||||
|
agent=agent,
|
||||||
|
resolved=False,
|
||||||
|
)
|
||||||
|
last_alert = cast(Alert, alerts.last())
|
||||||
|
|
||||||
|
# cycle through other alerts and resolve
|
||||||
|
for alert in alerts:
|
||||||
|
if alert.id != last_alert.pk:
|
||||||
|
alert.resolve()
|
||||||
|
|
||||||
|
return last_alert
|
||||||
|
except cls.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_or_return_task_alert(cls, task):
|
def create_or_return_task_alert(
|
||||||
|
cls,
|
||||||
|
task: "AutomatedTask",
|
||||||
|
agent: "Agent",
|
||||||
|
skip_create: bool = False,
|
||||||
|
) -> "Optional[Alert]":
|
||||||
|
|
||||||
if not cls.objects.filter(assigned_task=task, resolved=False).exists():
|
if not cls.objects.filter(
|
||||||
return cls.objects.create(
|
|
||||||
assigned_task=task,
|
assigned_task=task,
|
||||||
|
agent=agent,
|
||||||
|
resolved=False,
|
||||||
|
).exists():
|
||||||
|
if skip_create:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return cast(
|
||||||
|
Alert,
|
||||||
|
cls.objects.create(
|
||||||
|
assigned_task=task,
|
||||||
|
agent=agent,
|
||||||
alert_type="task",
|
alert_type="task",
|
||||||
severity=task.alert_severity,
|
severity=task.alert_severity,
|
||||||
message=f"{task.agent.hostname} has task: {task.name} that failed.",
|
message=f"{agent.hostname} has task: {task.name} that failed.",
|
||||||
hidden=True,
|
hidden=True,
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
return cls.objects.get(assigned_task=task, resolved=False)
|
try:
|
||||||
|
return cast(
|
||||||
|
Alert,
|
||||||
|
cls.objects.get(
|
||||||
|
assigned_task=task,
|
||||||
|
agent=agent,
|
||||||
|
resolved=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
except cls.MultipleObjectsReturned:
|
||||||
|
alerts = cls.objects.filter(
|
||||||
|
assigned_task=task,
|
||||||
|
agent=agent,
|
||||||
|
resolved=False,
|
||||||
|
)
|
||||||
|
last_alert = cast(Alert, alerts.last())
|
||||||
|
|
||||||
|
# cycle through other alerts and resolve
|
||||||
|
for alert in alerts:
|
||||||
|
if alert.id != last_alert.pk:
|
||||||
|
alert.resolve()
|
||||||
|
|
||||||
|
return last_alert
|
||||||
|
except cls.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
|
def handle_alert_failure(
|
||||||
|
cls, instance: Union[Agent, TaskResult, CheckResult]
|
||||||
|
) -> None:
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from autotasks.models import AutomatedTask
|
from autotasks.models import TaskResult
|
||||||
from checks.models import Check
|
from checks.models import CheckResult
|
||||||
|
|
||||||
# set variables
|
# set variables
|
||||||
dashboard_severities = None
|
dashboard_severities = None
|
||||||
@@ -147,6 +282,7 @@ class Alert(models.Model):
|
|||||||
alert_interval = None
|
alert_interval = None
|
||||||
email_task = None
|
email_task = None
|
||||||
text_task = None
|
text_task = None
|
||||||
|
run_script_action = None
|
||||||
|
|
||||||
# check what the instance passed is
|
# check what the instance passed is
|
||||||
if isinstance(instance, Agent):
|
if isinstance(instance, Agent):
|
||||||
@@ -174,16 +310,7 @@ class Alert(models.Model):
|
|||||||
alert_interval = alert_template.agent_periodic_alert_days
|
alert_interval = alert_template.agent_periodic_alert_days
|
||||||
run_script_action = alert_template.agent_script_actions
|
run_script_action = alert_template.agent_script_actions
|
||||||
|
|
||||||
if instance.should_create_alert(alert_template):
|
elif isinstance(instance, CheckResult):
|
||||||
alert = cls.create_or_return_availability_alert(instance)
|
|
||||||
else:
|
|
||||||
# check if there is an alert that exists
|
|
||||||
if cls.objects.filter(agent=instance, resolved=False).exists():
|
|
||||||
alert = cls.objects.get(agent=instance, resolved=False)
|
|
||||||
else:
|
|
||||||
alert = None
|
|
||||||
|
|
||||||
elif isinstance(instance, Check):
|
|
||||||
from checks.tasks import (
|
from checks.tasks import (
|
||||||
handle_check_email_alert_task,
|
handle_check_email_alert_task,
|
||||||
handle_check_sms_alert_task,
|
handle_check_sms_alert_task,
|
||||||
@@ -192,12 +319,17 @@ class Alert(models.Model):
|
|||||||
email_task = handle_check_email_alert_task
|
email_task = handle_check_email_alert_task
|
||||||
text_task = handle_check_sms_alert_task
|
text_task = handle_check_sms_alert_task
|
||||||
|
|
||||||
email_alert = instance.email_alert
|
email_alert = instance.assigned_check.email_alert
|
||||||
text_alert = instance.text_alert
|
text_alert = instance.assigned_check.text_alert
|
||||||
dashboard_alert = instance.dashboard_alert
|
dashboard_alert = instance.assigned_check.dashboard_alert
|
||||||
alert_template = instance.agent.alert_template
|
alert_template = instance.agent.alert_template
|
||||||
maintenance_mode = instance.agent.maintenance_mode
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
alert_severity = instance.alert_severity
|
alert_severity = (
|
||||||
|
instance.assigned_check.alert_severity
|
||||||
|
if instance.assigned_check.check_type
|
||||||
|
not in ["memory", "cpuload", "diskspace", "script"]
|
||||||
|
else instance.alert_severity
|
||||||
|
)
|
||||||
agent = instance.agent
|
agent = instance.agent
|
||||||
|
|
||||||
# set alert_template settings
|
# set alert_template settings
|
||||||
@@ -211,27 +343,18 @@ class Alert(models.Model):
|
|||||||
alert_interval = alert_template.check_periodic_alert_days
|
alert_interval = alert_template.check_periodic_alert_days
|
||||||
run_script_action = alert_template.check_script_actions
|
run_script_action = alert_template.check_script_actions
|
||||||
|
|
||||||
if instance.should_create_alert(alert_template):
|
elif isinstance(instance, TaskResult):
|
||||||
alert = cls.create_or_return_check_alert(instance)
|
|
||||||
else:
|
|
||||||
# check if there is an alert that exists
|
|
||||||
if cls.objects.filter(assigned_check=instance, resolved=False).exists():
|
|
||||||
alert = cls.objects.get(assigned_check=instance, resolved=False)
|
|
||||||
else:
|
|
||||||
alert = None
|
|
||||||
|
|
||||||
elif isinstance(instance, AutomatedTask):
|
|
||||||
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
|
||||||
|
|
||||||
email_task = handle_task_email_alert
|
email_task = handle_task_email_alert
|
||||||
text_task = handle_task_sms_alert
|
text_task = handle_task_sms_alert
|
||||||
|
|
||||||
email_alert = instance.email_alert
|
email_alert = instance.task.email_alert
|
||||||
text_alert = instance.text_alert
|
text_alert = instance.task.text_alert
|
||||||
dashboard_alert = instance.dashboard_alert
|
dashboard_alert = instance.task.dashboard_alert
|
||||||
alert_template = instance.agent.alert_template
|
alert_template = instance.agent.alert_template
|
||||||
maintenance_mode = instance.agent.maintenance_mode
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
alert_severity = instance.alert_severity
|
alert_severity = instance.task.alert_severity
|
||||||
agent = instance.agent
|
agent = instance.agent
|
||||||
|
|
||||||
# set alert_template settings
|
# set alert_template settings
|
||||||
@@ -245,22 +368,16 @@ class Alert(models.Model):
|
|||||||
alert_interval = alert_template.task_periodic_alert_days
|
alert_interval = alert_template.task_periodic_alert_days
|
||||||
run_script_action = alert_template.task_script_actions
|
run_script_action = alert_template.task_script_actions
|
||||||
|
|
||||||
if instance.should_create_alert(alert_template):
|
|
||||||
alert = cls.create_or_return_task_alert(instance)
|
|
||||||
else:
|
|
||||||
# check if there is an alert that exists
|
|
||||||
if cls.objects.filter(assigned_task=instance, resolved=False).exists():
|
|
||||||
alert = cls.objects.get(assigned_task=instance, resolved=False)
|
|
||||||
else:
|
|
||||||
alert = None
|
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
alert = instance.get_or_create_alert_if_needed(alert_template)
|
||||||
|
|
||||||
# return if agent is in maintenance mode
|
# return if agent is in maintenance mode
|
||||||
if maintenance_mode or not alert:
|
if not alert or maintenance_mode:
|
||||||
return
|
return
|
||||||
|
|
||||||
# check if alert severity changed on check and update the alert
|
# check if alert severity changed and update the alert
|
||||||
if alert_severity != alert.severity:
|
if alert_severity != alert.severity:
|
||||||
alert.severity = alert_severity
|
alert.severity = alert_severity
|
||||||
alert.save(update_fields=["severity"])
|
alert.save(update_fields=["severity"])
|
||||||
@@ -269,19 +386,25 @@ class Alert(models.Model):
|
|||||||
if dashboard_alert or always_dashboard:
|
if dashboard_alert or always_dashboard:
|
||||||
|
|
||||||
# check if alert template is set and specific severities are configured
|
# check if alert template is set and specific severities are configured
|
||||||
if alert_template and alert.severity not in dashboard_severities: # type: ignore
|
if (
|
||||||
pass
|
not alert_template
|
||||||
else:
|
or alert_template
|
||||||
|
and dashboard_severities
|
||||||
|
and alert.severity in dashboard_severities
|
||||||
|
):
|
||||||
alert.hidden = False
|
alert.hidden = False
|
||||||
alert.save()
|
alert.save(update_fields=["hidden"])
|
||||||
|
|
||||||
# send email if enabled
|
# send email if enabled
|
||||||
if email_alert or always_email:
|
if email_alert or always_email:
|
||||||
|
|
||||||
# check if alert template is set and specific severities are configured
|
# check if alert template is set and specific severities are configured
|
||||||
if alert_template and alert.severity not in email_severities: # type: ignore
|
if (
|
||||||
pass
|
not alert_template
|
||||||
else:
|
or alert_template
|
||||||
|
and email_severities
|
||||||
|
and alert.severity in email_severities
|
||||||
|
):
|
||||||
email_task.delay(
|
email_task.delay(
|
||||||
pk=alert.pk,
|
pk=alert.pk,
|
||||||
alert_interval=alert_interval,
|
alert_interval=alert_interval,
|
||||||
@@ -291,13 +414,21 @@ class Alert(models.Model):
|
|||||||
if text_alert or always_text:
|
if text_alert or always_text:
|
||||||
|
|
||||||
# check if alert template is set and specific severities are configured
|
# check if alert template is set and specific severities are configured
|
||||||
if alert_template and alert.severity not in text_severities: # type: ignore
|
if (
|
||||||
pass
|
not alert_template
|
||||||
else:
|
or alert_template
|
||||||
|
and text_severities
|
||||||
|
and alert.severity in text_severities
|
||||||
|
):
|
||||||
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
|
||||||
|
|
||||||
# check if any scripts should be run
|
# check if any scripts should be run
|
||||||
if alert_template and alert_template.action and run_script_action and not alert.action_run: # type: ignore
|
if (
|
||||||
|
alert_template
|
||||||
|
and alert_template.action
|
||||||
|
and run_script_action
|
||||||
|
and not alert.action_run
|
||||||
|
):
|
||||||
r = agent.run_script(
|
r = agent.run_script(
|
||||||
scriptpk=alert_template.action.pk,
|
scriptpk=alert_template.action.pk,
|
||||||
args=alert.parse_script_args(alert_template.action_args),
|
args=alert.parse_script_args(alert_template.action_args),
|
||||||
@@ -308,7 +439,7 @@ class Alert(models.Model):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# command was successful
|
# command was successful
|
||||||
if type(r) == dict:
|
if isinstance(r, dict):
|
||||||
alert.action_retcode = r["retcode"]
|
alert.action_retcode = r["retcode"]
|
||||||
alert.action_stdout = r["stdout"]
|
alert.action_stdout = r["stdout"]
|
||||||
alert.action_stderr = r["stderr"]
|
alert.action_stderr = r["stderr"]
|
||||||
@@ -323,16 +454,19 @@ class Alert(models.Model):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
|
def handle_alert_resolve(
|
||||||
|
cls, instance: Union[Agent, TaskResult, CheckResult]
|
||||||
|
) -> None:
|
||||||
from agents.models import Agent
|
from agents.models import Agent
|
||||||
from autotasks.models import AutomatedTask
|
from autotasks.models import TaskResult
|
||||||
from checks.models import Check
|
from checks.models import CheckResult
|
||||||
|
|
||||||
# set variables
|
# set variables
|
||||||
email_on_resolved = False
|
email_on_resolved = False
|
||||||
text_on_resolved = False
|
text_on_resolved = False
|
||||||
resolved_email_task = None
|
resolved_email_task = None
|
||||||
resolved_text_task = None
|
resolved_text_task = None
|
||||||
|
run_script_action = None
|
||||||
|
|
||||||
# check what the instance passed is
|
# check what the instance passed is
|
||||||
if isinstance(instance, Agent):
|
if isinstance(instance, Agent):
|
||||||
@@ -342,7 +476,6 @@ class Alert(models.Model):
|
|||||||
resolved_text_task = agent_recovery_sms_task
|
resolved_text_task = agent_recovery_sms_task
|
||||||
|
|
||||||
alert_template = instance.alert_template
|
alert_template = instance.alert_template
|
||||||
alert = cls.objects.get(agent=instance, resolved=False)
|
|
||||||
maintenance_mode = instance.maintenance_mode
|
maintenance_mode = instance.maintenance_mode
|
||||||
agent = instance
|
agent = instance
|
||||||
|
|
||||||
@@ -351,7 +484,12 @@ class Alert(models.Model):
|
|||||||
text_on_resolved = alert_template.agent_text_on_resolved
|
text_on_resolved = alert_template.agent_text_on_resolved
|
||||||
run_script_action = alert_template.agent_script_actions
|
run_script_action = alert_template.agent_script_actions
|
||||||
|
|
||||||
elif isinstance(instance, Check):
|
if agent.overdue_email_alert:
|
||||||
|
email_on_resolved = True
|
||||||
|
if agent.overdue_text_alert:
|
||||||
|
text_on_resolved = True
|
||||||
|
|
||||||
|
elif isinstance(instance, CheckResult):
|
||||||
from checks.tasks import (
|
from checks.tasks import (
|
||||||
handle_resolved_check_email_alert_task,
|
handle_resolved_check_email_alert_task,
|
||||||
handle_resolved_check_sms_alert_task,
|
handle_resolved_check_sms_alert_task,
|
||||||
@@ -361,7 +499,6 @@ class Alert(models.Model):
|
|||||||
resolved_text_task = handle_resolved_check_sms_alert_task
|
resolved_text_task = handle_resolved_check_sms_alert_task
|
||||||
|
|
||||||
alert_template = instance.agent.alert_template
|
alert_template = instance.agent.alert_template
|
||||||
alert = cls.objects.get(assigned_check=instance, resolved=False)
|
|
||||||
maintenance_mode = instance.agent.maintenance_mode
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
agent = instance.agent
|
agent = instance.agent
|
||||||
|
|
||||||
@@ -370,7 +507,7 @@ class Alert(models.Model):
|
|||||||
text_on_resolved = alert_template.check_text_on_resolved
|
text_on_resolved = alert_template.check_text_on_resolved
|
||||||
run_script_action = alert_template.check_script_actions
|
run_script_action = alert_template.check_script_actions
|
||||||
|
|
||||||
elif isinstance(instance, AutomatedTask):
|
elif isinstance(instance, TaskResult):
|
||||||
from autotasks.tasks import (
|
from autotasks.tasks import (
|
||||||
handle_resolved_task_email_alert,
|
handle_resolved_task_email_alert,
|
||||||
handle_resolved_task_sms_alert,
|
handle_resolved_task_sms_alert,
|
||||||
@@ -380,7 +517,6 @@ class Alert(models.Model):
|
|||||||
resolved_text_task = handle_resolved_task_sms_alert
|
resolved_text_task = handle_resolved_task_sms_alert
|
||||||
|
|
||||||
alert_template = instance.agent.alert_template
|
alert_template = instance.agent.alert_template
|
||||||
alert = cls.objects.get(assigned_task=instance, resolved=False)
|
|
||||||
maintenance_mode = instance.agent.maintenance_mode
|
maintenance_mode = instance.agent.maintenance_mode
|
||||||
agent = instance.agent
|
agent = instance.agent
|
||||||
|
|
||||||
@@ -392,8 +528,10 @@ class Alert(models.Model):
|
|||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
alert = instance.get_or_create_alert_if_needed(alert_template)
|
||||||
|
|
||||||
# return if agent is in maintenance mode
|
# return if agent is in maintenance mode
|
||||||
if maintenance_mode:
|
if not alert or maintenance_mode:
|
||||||
return
|
return
|
||||||
|
|
||||||
alert.resolve()
|
alert.resolve()
|
||||||
@@ -410,7 +548,7 @@ class Alert(models.Model):
|
|||||||
if (
|
if (
|
||||||
alert_template
|
alert_template
|
||||||
and alert_template.resolved_action
|
and alert_template.resolved_action
|
||||||
and run_script_action # type: ignore
|
and run_script_action
|
||||||
and not alert.resolved_action_run
|
and not alert.resolved_action_run
|
||||||
):
|
):
|
||||||
r = agent.run_script(
|
r = agent.run_script(
|
||||||
@@ -423,7 +561,7 @@ class Alert(models.Model):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# command was successful
|
# command was successful
|
||||||
if type(r) == dict:
|
if isinstance(r, dict):
|
||||||
alert.resolved_action_retcode = r["retcode"]
|
alert.resolved_action_retcode = r["retcode"]
|
||||||
alert.resolved_action_stdout = r["stdout"]
|
alert.resolved_action_stdout = r["stdout"]
|
||||||
alert.resolved_action_stderr = r["stderr"]
|
alert.resolved_action_stderr = r["stderr"]
|
||||||
@@ -439,7 +577,7 @@ class Alert(models.Model):
|
|||||||
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
|
||||||
)
|
)
|
||||||
|
|
||||||
def parse_script_args(self, args: list[str]):
|
def parse_script_args(self, args: List[str]) -> List[str]:
|
||||||
|
|
||||||
if not args:
|
if not args:
|
||||||
return []
|
return []
|
||||||
@@ -453,15 +591,16 @@ class Alert(models.Model):
|
|||||||
if match:
|
if match:
|
||||||
name = match.group(1)
|
name = match.group(1)
|
||||||
|
|
||||||
if hasattr(self, name):
|
# check if attr exists and isn't a function
|
||||||
|
if hasattr(self, name) and not callable(getattr(self, name)):
|
||||||
value = f"'{getattr(self, name)}'"
|
value = f"'{getattr(self, name)}'"
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
|
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
DebugLog.error(log_type="scripting", message=e)
|
DebugLog.error(log_type="scripting", message=str(e))
|
||||||
continue
|
continue
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -591,11 +730,22 @@ class AlertTemplate(BaseAuditModel):
|
|||||||
"agents.Agent", related_name="alert_exclusions", blank=True
|
"agents.Agent", related_name="alert_exclusions", blank=True
|
||||||
)
|
)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self) -> str:
|
||||||
return self.name
|
return self.name
|
||||||
|
|
||||||
|
def is_agent_excluded(self, agent: "Agent") -> bool:
|
||||||
|
return (
|
||||||
|
agent in self.excluded_agents.all()
|
||||||
|
or agent.site in self.excluded_sites.all()
|
||||||
|
or agent.client in self.excluded_clients.all()
|
||||||
|
or agent.monitoring_type == "workstation"
|
||||||
|
and self.exclude_workstations
|
||||||
|
or agent.monitoring_type == "server"
|
||||||
|
and self.exclude_servers
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def serialize(alert_template):
|
def serialize(alert_template: AlertTemplate) -> Dict[str, Any]:
|
||||||
# serializes the agent and returns json
|
# serializes the agent and returns json
|
||||||
from .serializers import AlertTemplateAuditSerializer
|
from .serializers import AlertTemplateAuditSerializer
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,54 @@
|
|||||||
|
from django.shortcuts import get_object_or_404
|
||||||
from rest_framework import permissions
|
from rest_framework import permissions
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
|
||||||
|
|
||||||
from tacticalrmm.permissions import _has_perm
|
if TYPE_CHECKING:
|
||||||
|
from accounts.models import User
|
||||||
|
|
||||||
|
|
||||||
class ManageAlertsPerms(permissions.BasePermission):
|
def _has_perm_on_alert(user: "User", id: int) -> bool:
|
||||||
def has_permission(self, r, view):
|
from alerts.models import Alert
|
||||||
if r.method == "GET" or r.method == "PATCH":
|
|
||||||
|
role = user.role
|
||||||
|
if user.is_superuser or (role and getattr(role, "is_superuser")):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
# make sure non-superusers with empty roles aren't permitted
|
||||||
|
elif not role:
|
||||||
|
return False
|
||||||
|
|
||||||
|
alert = get_object_or_404(Alert, id=id)
|
||||||
|
|
||||||
|
if alert.agent:
|
||||||
|
agent_id = alert.agent.agent_id
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return _has_perm_on_agent(user, agent_id)
|
||||||
|
|
||||||
|
|
||||||
|
class AlertPerms(permissions.BasePermission):
|
||||||
|
def has_permission(self, r, view) -> bool:
|
||||||
|
if r.method == "GET" or r.method == "PATCH":
|
||||||
|
if "pk" in view.kwargs.keys():
|
||||||
|
return _has_perm(r, "can_list_alerts") and _has_perm_on_alert(
|
||||||
|
r.user, view.kwargs["pk"]
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return _has_perm(r, "can_list_alerts")
|
||||||
|
else:
|
||||||
|
if "pk" in view.kwargs.keys():
|
||||||
|
return _has_perm(r, "can_manage_alerts") and _has_perm_on_alert(
|
||||||
|
r.user, view.kwargs["pk"]
|
||||||
|
)
|
||||||
|
else:
|
||||||
return _has_perm(r, "can_manage_alerts")
|
return _has_perm(r, "can_manage_alerts")
|
||||||
|
|
||||||
|
|
||||||
|
class AlertTemplatePerms(permissions.BasePermission):
|
||||||
|
def has_permission(self, r, view) -> bool:
|
||||||
|
if r.method == "GET":
|
||||||
|
return _has_perm(r, "can_list_alerttemplates")
|
||||||
|
else:
|
||||||
|
return _has_perm(r, "can_manage_alerttemplates")
|
||||||
|
@@ -1,88 +1,18 @@
+from automation.serializers import PolicySerializer
+from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
 from rest_framework.fields import SerializerMethodField
 from rest_framework.serializers import ModelSerializer, ReadOnlyField
 
-from automation.serializers import PolicySerializer
-from clients.serializers import ClientSerializer, SiteSerializer
-from tacticalrmm.utils import get_default_timezone
 
 from .models import Alert, AlertTemplate
 
 
 class AlertSerializer(ModelSerializer):
 
-    hostname = SerializerMethodField(read_only=True)
-    client = SerializerMethodField(read_only=True)
-    site = SerializerMethodField(read_only=True)
-    alert_time = SerializerMethodField(read_only=True)
-    resolve_on = SerializerMethodField(read_only=True)
-    snoozed_until = SerializerMethodField(read_only=True)
-
-    def get_hostname(self, instance):
-        if instance.alert_type == "availability":
-            return instance.agent.hostname if instance.agent else ""
-        elif instance.alert_type == "check":
-            return (
-                instance.assigned_check.agent.hostname
-                if instance.assigned_check
-                else ""
-            )
-        elif instance.alert_type == "task":
-            return (
-                instance.assigned_task.agent.hostname if instance.assigned_task else ""
-            )
-        else:
-            return ""
-
-    def get_client(self, instance):
-        if instance.alert_type == "availability":
-            return instance.agent.client.name if instance.agent else ""
-        elif instance.alert_type == "check":
-            return (
-                instance.assigned_check.agent.client.name
-                if instance.assigned_check
-                else ""
-            )
-        elif instance.alert_type == "task":
-            return (
-                instance.assigned_task.agent.client.name
-                if instance.assigned_task
-                else ""
-            )
-        else:
-            return ""
-
-    def get_site(self, instance):
-        if instance.alert_type == "availability":
-            return instance.agent.site.name if instance.agent else ""
-        elif instance.alert_type == "check":
-            return (
-                instance.assigned_check.agent.site.name
-                if instance.assigned_check
-                else ""
-            )
-        elif instance.alert_type == "task":
-            return (
-                instance.assigned_task.agent.site.name if instance.assigned_task else ""
-            )
-        else:
-            return ""
-
-    def get_alert_time(self, instance):
-        if instance.alert_time:
-            return instance.alert_time.astimezone(get_default_timezone()).timestamp()
-        else:
-            return None
-
-    def get_resolve_on(self, instance):
-        if instance.resolved_on:
-            return instance.resolved_on.astimezone(get_default_timezone()).timestamp()
-        else:
-            return None
-
-    def get_snoozed_until(self, instance):
-        if instance.snooze_until:
-            return instance.snooze_until.astimezone(get_default_timezone()).timestamp()
-        return None
+    hostname = ReadOnlyField(source="assigned_agent.hostname")
+    agent_id = ReadOnlyField(source="assigned_agent.agent_id")
+    client = ReadOnlyField(source="client.name")
+    site = ReadOnlyField(source="site.name")
+    alert_time = ReadOnlyField()
 
     class Meta:
         model = Alert

@@ -104,17 +34,17 @@ class AlertTemplateSerializer(ModelSerializer):
         fields = "__all__"
 
     def get_applied_count(self, instance):
-        count = 0
-        count += instance.policies.count()
-        count += instance.clients.count()
-        count += instance.sites.count()
-        return count
+        return (
+            instance.policies.count()
+            + instance.clients.count()
+            + instance.sites.count()
+        )
 
 
 class AlertTemplateRelationSerializer(ModelSerializer):
     policies = PolicySerializer(read_only=True, many=True)
-    clients = ClientSerializer(read_only=True, many=True)
-    sites = SiteSerializer(read_only=True, many=True)
+    clients = ClientMinimumSerializer(read_only=True, many=True)
+    sites = SiteMinimumSerializer(read_only=True, many=True)
 
     class Meta:
         model = AlertTemplate
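The serializer hunk above swaps per-alert-type SerializerMethodField lookups for declarative ReadOnlyField(source=...) fields, relying on the Alert model now exposing the related agent, client and site directly. A minimal sketch contrasting the two patterns (the Meta bodies are the only parts assumed beyond what the diff shows):

from rest_framework.fields import SerializerMethodField
from rest_framework.serializers import ModelSerializer, ReadOnlyField

from .models import Alert


class MethodFieldStyle(ModelSerializer):
    # value computed in Python for every instance
    hostname = SerializerMethodField(read_only=True)

    def get_hostname(self, instance):
        return instance.agent.hostname if instance.agent else ""

    class Meta:
        model = Alert
        fields = "__all__"


class ReadOnlyFieldStyle(ModelSerializer):
    # value pulled straight off the related object via source=
    hostname = ReadOnlyField(source="assigned_agent.hostname")

    class Meta:
        model = Alert
        fields = "__all__"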
@@ -1,11 +1,11 @@
 from django.utils import timezone as djangotime
+from .models import Alert
+from agents.models import Agent
 from tacticalrmm.celery import app
 
 
 @app.task
 def unsnooze_alerts() -> str:
-    from .models import Alert
-
     Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
         snoozed=False, snooze_until=None
     )

@@ -14,10 +14,10 @@ def unsnooze_alerts() -> str:
 
 
 @app.task
-def cache_agents_alert_template():
-    from agents.models import Agent
-
-    for agent in Agent.objects.only("pk"):
+def cache_agents_alert_template() -> str:
+    for agent in Agent.objects.only(
+        "pk", "site", "policy", "alert_template"
+    ).select_related("site", "policy", "alert_template"):
         agent.set_alert_template()
 
     return "ok"

@@ -25,8 +25,6 @@ def cache_agents_alert_template():
 
 
 @app.task
 def prune_resolved_alerts(older_than_days: int) -> str:
-    from .models import Alert
-
     Alert.objects.filter(resolved=True).filter(
         alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
     ).delete()
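unsnooze_alerts and prune_resolved_alerts are plain Celery tasks; nothing in this hunk schedules them. A hedged sketch of how such tasks are commonly wired into Celery beat (the entry names, intervals and the 30-day retention below are illustrative assumptions, not settings taken from this project):

# Illustrative beat schedule; all values here are assumptions.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "unsnooze-alerts": {
        "task": "alerts.tasks.unsnooze_alerts",
        "schedule": crontab(minute="*/5"),  # every 5 minutes
    },
    "prune-resolved-alerts": {
        "task": "alerts.tasks.prune_resolved_alerts",
        "schedule": crontab(hour=0, minute=0),  # nightly
        "args": (30,),  # older_than_days
    },
}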
(File diff suppressed because it is too large.)
@@ -3,10 +3,10 @@ from django.urls import path
 from . import views
 
 urlpatterns = [
-    path("alerts/", views.GetAddAlerts.as_view()),
+    path("", views.GetAddAlerts.as_view()),
+    path("<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
     path("bulk/", views.BulkAlerts.as_view()),
-    path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
-    path("alerttemplates/", views.GetAddAlertTemplates.as_view()),
-    path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
-    path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
+    path("templates/", views.GetAddAlertTemplates.as_view()),
+    path("templates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
+    path("templates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
 ]
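Dropping the alerts/ and alerttemplates/ prefixes from the route strings implies the app's URLConf is now mounted under a prefix by the project-level urls.py, which is not shown in this diff. A hedged sketch of that wiring (the prefix and module path are assumptions):

# Assumed project-level include; the real prefix is defined outside this diff.
from django.urls import include, path

urlpatterns = [
    # "" resolves to /alerts/, "templates/" to /alerts/templates/, and so on
    path("alerts/", include("alerts.urls")),
]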
@@ -10,7 +10,7 @@ from rest_framework.views import APIView
 from tacticalrmm.utils import notify_error
 
 from .models import Alert, AlertTemplate
-from .permissions import ManageAlertsPerms
+from .permissions import AlertPerms, AlertTemplatePerms
 from .serializers import (
     AlertSerializer,
     AlertTemplateRelationSerializer,

@@ -20,7 +20,7 @@ from .tasks import cache_agents_alert_template
 
 
 class GetAddAlerts(APIView):
-    permission_classes = [IsAuthenticated, ManageAlertsPerms]
+    permission_classes = [IsAuthenticated, AlertPerms]
 
     def patch(self, request):
 

@@ -92,7 +92,8 @@ class GetAddAlerts(APIView):
             )
 
             alerts = (
-                Alert.objects.filter(clientFilter)
+                Alert.objects.filter_by_role(request.user)  # type: ignore
+                .filter(clientFilter)
                 .filter(severityFilter)
                 .filter(resolvedFilter)
                 .filter(snoozedFilter)

@@ -101,7 +102,7 @@ class GetAddAlerts(APIView):
             return Response(AlertSerializer(alerts, many=True).data)
 
         else:
-            alerts = Alert.objects.all()
+            alerts = Alert.objects.filter_by_role(request.user)  # type: ignore
             return Response(AlertSerializer(alerts, many=True).data)
 
     def post(self, request):

@@ -113,11 +114,10 @@ class GetAddAlerts(APIView):
 
 
 class GetUpdateDeleteAlert(APIView):
-    permission_classes = [IsAuthenticated, ManageAlertsPerms]
+    permission_classes = [IsAuthenticated, AlertPerms]
 
     def get(self, request, pk):
         alert = get_object_or_404(Alert, pk=pk)
 
         return Response(AlertSerializer(alert).data)
 
     def put(self, request, pk):

@@ -169,7 +169,7 @@ class GetUpdateDeleteAlert(APIView):
 
 
 class BulkAlerts(APIView):
-    permission_classes = [IsAuthenticated, ManageAlertsPerms]
+    permission_classes = [IsAuthenticated, AlertPerms]
 
     def post(self, request):
         if request.data["bulk_action"] == "resolve":

@@ -193,11 +193,10 @@ class BulkAlerts(APIView):
 
 
 class GetAddAlertTemplates(APIView):
-    permission_classes = [IsAuthenticated, ManageAlertsPerms]
+    permission_classes = [IsAuthenticated, AlertTemplatePerms]
 
     def get(self, request):
         alert_templates = AlertTemplate.objects.all()
 
         return Response(AlertTemplateSerializer(alert_templates, many=True).data)
 
     def post(self, request):

@@ -212,7 +211,7 @@ class GetAddAlertTemplates(APIView):
 
 
 class GetUpdateDeleteAlertTemplate(APIView):
-    permission_classes = [IsAuthenticated, ManageAlertsPerms]
+    permission_classes = [IsAuthenticated, AlertTemplatePerms]
 
     def get(self, request, pk):
         alert_template = get_object_or_404(AlertTemplate, pk=pk)

@@ -243,6 +242,8 @@ class GetUpdateDeleteAlertTemplate(APIView):
 
 
 class RelatedAlertTemplate(APIView):
+    permission_classes = [IsAuthenticated, AlertTemplatePerms]
+
     def get(self, request, pk):
         alert_template = get_object_or_404(AlertTemplate, pk=pk)
         return Response(AlertTemplateRelationSerializer(alert_template).data)
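filter_by_role is a custom queryset method added elsewhere in this change set; its body is not part of the hunks shown here. A hedged sketch of the general pattern such a method follows, restricting a queryset to what a user's role may view (field and relation names below are assumptions for illustration):

# Illustrative pattern only; the project's real filter_by_role may differ.
from django.db import models


class AlertQuerySet(models.QuerySet):
    def filter_by_role(self, user):
        role = getattr(user, "role", None)
        if user.is_superuser or role is None:
            return self  # unrestricted
        qs = self
        # assumed role M2M fields limiting visibility to certain sites/clients
        if role.can_view_sites.exists():
            qs = qs.filter(agent__site__in=role.can_view_sites.all())
        if role.can_view_clients.exists():
            qs = qs.filter(agent__site__client__in=role.can_view_clients.all())
        return qs


# attached to the model with: objects = AlertQuerySet.as_manager()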
@@ -1,12 +1,11 @@
 import json
 import os
-from unittest.mock import patch
 
+from autotasks.models import TaskResult
 from django.conf import settings
 from django.utils import timezone as djangotime
 from model_bakery import baker
 
-from autotasks.models import AutomatedTask
 from tacticalrmm.test import TacticalTestCase
 

@@ -17,46 +16,53 @@ class TestAPIv3(TacticalTestCase):
         self.agent = baker.make_recipe("agents.agent")
 
     def test_get_checks(self):
-        url = f"/api/v3/{self.agent.agent_id}/checkrunner/"
+        agent = baker.make_recipe("agents.agent")
+        url = f"/api/v3/{agent.agent_id}/checkrunner/"
 
         # add a check
-        check1 = baker.make_recipe("checks.ping_check", agent=self.agent)
+        check1 = baker.make_recipe("checks.ping_check", agent=agent)
+        check_result1 = baker.make(
+            "checks.CheckResult", agent=agent, assigned_check=check1
+        )
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data["check_interval"], self.agent.check_interval)  # type: ignore
-        self.assertEqual(len(r.data["checks"]), 1)  # type: ignore
+        self.assertEqual(r.data["check_interval"], self.agent.check_interval)
+        self.assertEqual(len(r.data["checks"]), 1)
 
         # override check run interval
         check2 = baker.make_recipe(
-            "checks.ping_check", agent=self.agent, run_interval=20
+            "checks.diskspace_check", agent=agent, run_interval=20
+        )
+        check_result2 = baker.make(
+            "checks.CheckResult", agent=agent, assigned_check=check2
         )
 
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data["check_interval"], 20)  # type: ignore
-        self.assertEqual(len(r.data["checks"]), 2)  # type: ignore
+        self.assertEqual(len(r.data["checks"]), 2)
+        self.assertEqual(r.data["check_interval"], 20)
 
         # Set last_run on both checks and should return an empty list
-        check1.last_run = djangotime.now()
-        check1.save()
-        check2.last_run = djangotime.now()
-        check2.save()
+        check_result1.last_run = djangotime.now()
+        check_result1.save()
+        check_result2.last_run = djangotime.now()
+        check_result2.save()
 
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data["check_interval"], 20)  # type: ignore
-        self.assertFalse(r.data["checks"])  # type: ignore
+        self.assertEqual(r.data["check_interval"], 20)
+        self.assertFalse(r.data["checks"])
 
         # set last_run greater than interval
-        check1.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
-        check1.save()
-        check2.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
-        check2.save()
+        check_result1.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
+        check_result1.save()
+        check_result2.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
+        check_result2.save()
 
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data["check_interval"], 20)  # type: ignore
-        self.assertEquals(len(r.data["checks"]), 2)  # type: ignore
+        self.assertEqual(r.data["check_interval"], 20)
+        self.assertEquals(len(r.data["checks"]), 2)
 
         url = "/api/v3/Maj34ACb324j234asdj2n34kASDjh34-DESKTOPTEST123/checkrunner/"
         r = self.client.get(url)
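The rewritten test reflects a schema split: run state such as last_run now lives on a per-agent CheckResult row instead of on Check itself. A rough sketch of the relationship the test exercises (only the fields used above come from the diff; everything else is an assumption):

# Rough shape of the Check / CheckResult split; illustrative, not the real models.
from django.db import models


class Check(models.Model):
    # the check definition (type, thresholds, optional run_interval override, ...)
    agent = models.ForeignKey("agents.Agent", null=True, blank=True, on_delete=models.CASCADE)
    run_interval = models.PositiveIntegerField(default=0)


class CheckResult(models.Model):
    # per-agent run state, one row per (agent, assigned_check) pair
    agent = models.ForeignKey("agents.Agent", on_delete=models.CASCADE)
    assigned_check = models.ForeignKey(Check, on_delete=models.CASCADE)
    last_run = models.DateTimeField(null=True, blank=True)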
@@ -130,97 +136,34 @@ class TestAPIv3(TacticalTestCase):
         self.assertIsInstance(r.json()["check_interval"], int)
         self.assertEqual(len(r.json()["checks"]), 15)
 
-    def test_checkin_patch(self):
-        from logs.models import PendingAction
-
-        url = "/api/v3/checkin/"
-        agent_updated = baker.make_recipe("agents.agent", version="1.3.0")
-        PendingAction.objects.create(
-            agent=agent_updated,
-            action_type="agentupdate",
-            details={
-                "url": agent_updated.winagent_dl,
-                "version": agent_updated.version,
-                "inno": agent_updated.win_inno_exe,
-            },
-        )
-        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
-        self.assertEqual(action.status, "pending")
-
-        # test agent failed to update and still on same version
-        payload = {
-            "func": "hello",
-            "agent_id": agent_updated.agent_id,
-            "version": "1.3.0",
-        }
-        r = self.client.patch(url, payload, format="json")
-        self.assertEqual(r.status_code, 200)
-        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
-        self.assertEqual(action.status, "pending")
-
-        # test agent successful update
-        payload["version"] = settings.LATEST_AGENT_VER
-        r = self.client.patch(url, payload, format="json")
-        self.assertEqual(r.status_code, 200)
-        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
-        self.assertEqual(action.status, "completed")
-        action.delete()
-
-    @patch("apiv3.views.reload_nats")
-    def test_agent_recovery(self, reload_nats):
-        reload_nats.return_value = "ok"
-        r = self.client.get("/api/v3/34jahsdkjasncASDjhg2b3j4r/recover/")
-        self.assertEqual(r.status_code, 404)
-
-        agent = baker.make_recipe("agents.online_agent")
-        url = f"/api/v3/{agent.agent_id}/recovery/"
-
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.json(), {"mode": "pass", "shellcmd": ""})
-        reload_nats.assert_not_called()
-
-        baker.make("agents.RecoveryAction", agent=agent, mode="mesh")
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.json(), {"mode": "mesh", "shellcmd": ""})
-        reload_nats.assert_not_called()
-
-        baker.make(
-            "agents.RecoveryAction",
-            agent=agent,
-            mode="command",
-            command="shutdown /r /t 5 /f",
-        )
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(
-            r.json(), {"mode": "command", "shellcmd": "shutdown /r /t 5 /f"}
-        )
-        reload_nats.assert_not_called()
-
-        baker.make("agents.RecoveryAction", agent=agent, mode="rpc")
-        r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.json(), {"mode": "rpc", "shellcmd": ""})
-        reload_nats.assert_called_once()
-
     def test_task_runner_get(self):
         from autotasks.serializers import TaskGOGetSerializer
 
         r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/")
         self.assertEqual(r.status_code, 404)
 
-        # setup data
-        agent = baker.make_recipe("agents.agent")
-        script = baker.make_recipe("scripts.script")
-        task = baker.make("autotasks.AutomatedTask", agent=agent, script=script)
-
-        url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"  # type: ignore
+        script = baker.make("scripts.script")
+
+        # setup data
+        task_actions = [
+            {"type": "cmd", "command": "whoami", "timeout": 10, "shell": "cmd"},
+            {
+                "type": "script",
+                "script": script.id,
+                "script_args": ["test"],
+                "timeout": 30,
+            },
+            {"type": "script", "script": 3, "script_args": [], "timeout": 30},
+        ]
+
+        agent = baker.make_recipe("agents.agent")
+        task = baker.make("autotasks.AutomatedTask", agent=agent, actions=task_actions)
+
+        url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"
 
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(TaskGOGetSerializer(task).data, r.data)  # type: ignore
+        self.assertEqual(TaskGOGetSerializer(task).data, r.data)
 
     def test_task_runner_results(self):
         from agents.models import AgentCustomField

@@ -231,8 +174,9 @@ class TestAPIv3(TacticalTestCase):
         # setup data
         agent = baker.make_recipe("agents.agent")
         task = baker.make("autotasks.AutomatedTask", agent=agent)
+        task_result = baker.make("autotasks.TaskResult", agent=agent, task=task)
 
-        url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"  # type: ignore
+        url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"
 
         # test passing task
         data = {

@@ -244,7 +188,7 @@ class TestAPIv3(TacticalTestCase):
 
         r = self.client.patch(url, data)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "passing")  # type: ignore
+        self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "passing")
 
         # test failing task
         data = {

@@ -256,7 +200,7 @@ class TestAPIv3(TacticalTestCase):
 
         r = self.client.patch(url, data)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing")  # type: ignore
+        self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "failing")
 
         # test collector task
         text = baker.make("core.CustomField", model="agent", type="text", name="Test")

@@ -268,8 +212,8 @@ class TestAPIv3(TacticalTestCase):
         )
 
         # test text fields
-        task.custom_field = text  # type: ignore
-        task.save()  # type: ignore
+        task.custom_field = text
+        task.save()
 
         # test failing failing with stderr
         data = {

@@ -281,7 +225,7 @@ class TestAPIv3(TacticalTestCase):
 
         r = self.client.patch(url, data)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing")  # type: ignore
+        self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "failing")
 
         # test saving to text field
         data = {

@@ -293,12 +237,15 @@ class TestAPIv3(TacticalTestCase):
 
         r = self.client.patch(url, data)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
-        self.assertEqual(AgentCustomField.objects.get(field=text, agent=task.agent).value, "the last line")  # type: ignore
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
+        self.assertEqual(
+            AgentCustomField.objects.get(field=text, agent=task.agent).value,
+            "the last line",
+        )
 
         # test saving to checkbox field
-        task.custom_field = boolean  # type: ignore
-        task.save()  # type: ignore
+        task.custom_field = boolean
+        task.save()
 
         data = {
             "stdout": "1",

@@ -309,12 +256,14 @@ class TestAPIv3(TacticalTestCase):
 
         r = self.client.patch(url, data)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
-        self.assertTrue(AgentCustomField.objects.get(field=boolean, agent=task.agent).value)  # type: ignore
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
+        self.assertTrue(
+            AgentCustomField.objects.get(field=boolean, agent=task.agent).value
+        )
 
         # test saving to multiple field with commas
-        task.custom_field = multiple  # type: ignore
-        task.save()  # type: ignore
+        task.custom_field = multiple
+        task.save()
 
         data = {
             "stdout": "this,is,an,array",

@@ -325,8 +274,11 @@ class TestAPIv3(TacticalTestCase):
 
         r = self.client.patch(url, data)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
-        self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this", "is", "an", "array"])  # type: ignore
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
+        self.assertEqual(
+            AgentCustomField.objects.get(field=multiple, agent=task.agent).value,
+            ["this", "is", "an", "array"],
+        )
 
         # test mutiple with a single value
         data = {

@@ -338,5 +290,8 @@ class TestAPIv3(TacticalTestCase):
 
         r = self.client.patch(url, data)
         self.assertEqual(r.status_code, 200)
-        self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing")  # type: ignore
-        self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this"])  # type: ignore
+        self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
+        self.assertEqual(
+            AgentCustomField.objects.get(field=multiple, agent=task.agent).value,
+            ["this"],
+        )
@@ -19,6 +19,5 @@ urlpatterns = [
     path("winupdates/", views.WinUpdates.as_view()),
     path("superseded/", views.SupersededWinUpdate.as_view()),
     path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
-    path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
     path("<int:pk>/<str:agentid>/histresult/", views.AgentHistoryResult.as_view()),
 ]
@@ -1,139 +1,43 @@
 import asyncio
-import os
 import time
 
+from accounts.models import User
+from agents.models import Agent, AgentHistory
+from agents.serializers import AgentHistorySerializer
+from autotasks.models import AutomatedTask, TaskResult
+from autotasks.serializers import TaskGOGetSerializer, TaskResultSerializer
+from checks.models import Check, CheckResult
+from checks.serializers import CheckRunnerGetSerializer
+from core.utils import get_core_settings
+from core.utils import download_mesh_agent, get_mesh_device_id, get_mesh_ws_url
 from django.conf import settings
-from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
 from django.utils import timezone as djangotime
+from logs.models import DebugLog, PendingAction
 from packaging import version as pyver
 from rest_framework.authentication import TokenAuthentication
 from rest_framework.authtoken.models import Token
 from rest_framework.permissions import IsAuthenticated
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
-from accounts.models import User
-from agents.models import Agent, AgentHistory
-from agents.serializers import WinAgentSerializer, AgentHistorySerializer
-from autotasks.models import AutomatedTask
-from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
-from checks.models import Check
-from checks.serializers import CheckRunnerGetSerializer
-from checks.utils import bytes2human
-from logs.models import PendingAction, DebugLog
 from software.models import InstalledSoftware
-from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
 from winupdate.models import WinUpdate, WinUpdatePolicy
 
+from tacticalrmm.constants import MeshAgentIdent
+from tacticalrmm.utils import notify_error, reload_nats
+
 
 class CheckIn(APIView):
 
     authentication_classes = [TokenAuthentication]
     permission_classes = [IsAuthenticated]
 
-    def patch(self, request):
-        """
-        !!! DEPRECATED AS OF AGENT 1.6.0 !!!
-        Endpoint be removed in a future release
-        """
-        from alerts.models import Alert
-
-        updated = False
-        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        if pyver.parse(request.data["version"]) > pyver.parse(
-            agent.version
-        ) or pyver.parse(request.data["version"]) == pyver.parse(
-            settings.LATEST_AGENT_VER
-        ):
-            updated = True
-        agent.version = request.data["version"]
-        agent.last_seen = djangotime.now()
-        agent.save(update_fields=["version", "last_seen"])
-
-        # change agent update pending status to completed if agent has just updated
-        if (
-            updated
-            and agent.pendingactions.filter(  # type: ignore
-                action_type="agentupdate", status="pending"
-            ).exists()
-        ):
-            agent.pendingactions.filter(  # type: ignore
-                action_type="agentupdate", status="pending"
-            ).update(status="completed")
-
-        # handles any alerting actions
-        if Alert.objects.filter(agent=agent, resolved=False).exists():
-            Alert.handle_alert_resolve(agent)
-
-        # sync scheduled tasks
-        if agent.autotasks.exclude(sync_status="synced").exists():  # type: ignore
-            tasks = agent.autotasks.exclude(sync_status="synced")  # type: ignore
-
-            for task in tasks:
-                if task.sync_status == "pendingdeletion":
-                    task.delete_task_on_agent()
-                elif task.sync_status == "initial":
-                    task.modify_task_on_agent()
-                elif task.sync_status == "notsynced":
-                    task.create_task_on_agent()
-
-        return Response("ok")
-
-    def put(self, request):
-        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
-
-        if request.data["func"] == "disks":
-            disks = request.data["disks"]
-            new = []
-            for disk in disks:
-                tmp = {}
-                for _, _ in disk.items():
-                    tmp["device"] = disk["device"]
-                    tmp["fstype"] = disk["fstype"]
-                    tmp["total"] = bytes2human(disk["total"])
-                    tmp["used"] = bytes2human(disk["used"])
-                    tmp["free"] = bytes2human(disk["free"])
-                    tmp["percent"] = int(disk["percent"])
-                new.append(tmp)
-
-            serializer.is_valid(raise_exception=True)
-            serializer.save(disks=new)
-            return Response("ok")
-
-        if request.data["func"] == "loggedonuser":
-            if request.data["logged_in_username"] != "None":
-                serializer.is_valid(raise_exception=True)
-                serializer.save(last_logged_in_user=request.data["logged_in_username"])
-                return Response("ok")
-
-        if request.data["func"] == "software":
-            raw: SoftwareList = request.data["software"]
-            if not isinstance(raw, list):
-                return notify_error("err")
-
-            sw = filter_software(raw)
-            if not InstalledSoftware.objects.filter(agent=agent).exists():
-                InstalledSoftware(agent=agent, software=sw).save()
-            else:
-                s = agent.installedsoftware_set.first()  # type: ignore
-                s.software = sw
-                s.save(update_fields=["software"])
-
-            return Response("ok")
-
-        serializer.is_valid(raise_exception=True)
-        serializer.save()
-        return Response("ok")
-
     # called once during tacticalagent windows service startup
     def post(self, request):
         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
         if not agent.choco_installed:
             asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
 
-        time.sleep(0.5)
         asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
         return Response("ok")
 
@@ -168,18 +72,18 @@ class WinUpdates(APIView):
 
     def put(self, request):
         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
 
+        needs_reboot: bool = request.data["needs_reboot"]
+        agent.needs_reboot = needs_reboot
+        agent.save(update_fields=["needs_reboot"])
+
         reboot_policy: str = agent.get_patch_policy().reboot_after_install
         reboot = False
 
         if reboot_policy == "always":
             reboot = True
-
-        if request.data["needs_reboot"]:
-            if reboot_policy == "required":
-                reboot = True
-            elif reboot_policy == "never":
-                agent.needs_reboot = True
-                agent.save(update_fields=["needs_reboot"])
+        elif needs_reboot and reboot_policy == "required":
+            reboot = True
 
         if reboot:
             asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))

@@ -195,6 +99,9 @@ class WinUpdates(APIView):
     def patch(self, request):
         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
         u = agent.winupdates.filter(guid=request.data["guid"]).last()  # type: ignore
+        if not u:
+            raise WinUpdate.DoesNotExist
+
         success: bool = request.data["success"]
         if success:
             u.result = "success"

@@ -249,14 +156,6 @@ class WinUpdates(APIView):
         ).save()
 
         agent.delete_superseded_updates()
 
-        # more superseded updates cleanup
-        if pyver.parse(agent.version) <= pyver.parse("1.4.2"):
-            for u in agent.winupdates.filter(  # type: ignore
-                date_installed__isnull=True, result="failed"
-            ).exclude(installed=True):
-                u.delete()
-
         return Response("ok")
 

@@ -279,7 +178,7 @@ class RunChecks(APIView):
 
     def get(self, request, agentid):
         agent = get_object_or_404(Agent, agent_id=agentid)
-        checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
+        checks = agent.get_checks_with_policies(exclude_overridden=True)
         ret = {
             "agent": agent.pk,
             "check_interval": agent.check_interval,

@@ -294,29 +193,26 @@ class CheckRunner(APIView):
 
     def get(self, request, agentid):
         agent = get_object_or_404(Agent, agent_id=agentid)
-        checks = agent.agentchecks.filter(overriden_by_policy=False)  # type: ignore
+        checks = agent.get_checks_with_policies(exclude_overridden=True)
 
         run_list = [
             check
             for check in checks
             # always run if check hasn't run yet
-            if not check.last_run
-            # if a check interval is set, see if the correct amount of seconds have passed
-            or (
-                check.run_interval
-                and (
-                    check.last_run
-                    < djangotime.now()
-                    - djangotime.timedelta(seconds=check.run_interval)
-                )
-            )
-            # if check interval isn't set, make sure the agent's check interval has passed before running
-            or (
-                not check.run_interval
-                and check.last_run
-                < djangotime.now() - djangotime.timedelta(seconds=agent.check_interval)
-            )
+            if not isinstance(check.check_result, CheckResult)
+            or not check.check_result.last_run
+            # see if the correct amount of seconds have passed
+            or (
+                check.check_result.last_run
+                < djangotime.now()
+                - djangotime.timedelta(
+                    seconds=check.run_interval
+                    if check.run_interval
+                    else agent.check_interval
+                )
+            )
         ]
 
         ret = {
             "agent": agent.pk,
             "check_interval": agent.check_run_interval(),

@@ -326,14 +222,29 @@ class CheckRunner(APIView):
 
     def patch(self, request):
         check = get_object_or_404(Check, pk=request.data["id"])
-        if pyver.parse(check.agent.version) < pyver.parse("1.5.7"):
-            return notify_error("unsupported")
 
-        check.last_run = djangotime.now()
-        check.save(update_fields=["last_run"])
-        status = check.handle_check(request.data)
-        if status == "failing" and check.assignedtask.exists():  # type: ignore
-            check.handle_assigned_task()
+        if "agent_id" not in request.data.keys():
+            return notify_error("Agent upgrade required")
+
+        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
+
+        # check check result or create if doesn't exist
+        try:
+            check_result = CheckResult.objects.get(assigned_check=check, agent=agent)
+        except CheckResult.DoesNotExist:
+            check_result = CheckResult(assigned_check=check, agent=agent)
+
+        check_result.last_run = djangotime.now()
+        check_result.save()
+
+        status = check_result.handle_check(request.data)
+        if status == "failing" and check.assignedtasks.exists():  # type: ignore
+            for task in check.assignedtasks.all():  # type: ignore
+                if task.enabled:
+                    if task.policy:
+                        task.run_win_task(agent)
+                    else:
+                        task.run_win_task()
 
         return Response("ok")
 
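The rebuilt run_list comprehension folds the two old branches into one rule: a check is due when it has never produced a CheckResult, or when its last_run is older than the check's own run_interval, falling back to the agent's check_interval when no override is set. A small standalone sketch of that rule:

# Standalone illustration of the "is this check due?" rule used above.
from datetime import datetime, timedelta


def check_is_due(last_run, run_interval, agent_check_interval, now=None):
    now = now or datetime.utcnow()
    if last_run is None:  # never ran, so always due
        return True
    effective = run_interval if run_interval else agent_check_interval
    return last_run < now - timedelta(seconds=effective)


# a check with a 20s override, last run 200s ago, is due:
assert check_is_due(datetime.utcnow() - timedelta(seconds=200), 20, 120)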
@@ -365,32 +276,49 @@ class TaskRunner(APIView):
         agent = get_object_or_404(Agent, agent_id=agentid)
         task = get_object_or_404(AutomatedTask, pk=pk)
 
-        serializer = TaskRunnerPatchSerializer(
-            instance=task, data=request.data, partial=True
-        )
+        # check check result or create if doesn't exist
+        try:
+            task_result = TaskResult.objects.get(task=task, agent=agent)
+            serializer = TaskResultSerializer(
+                data=request.data, instance=task_result, partial=True
+            )
+        except TaskResult.DoesNotExist:
+            serializer = TaskResultSerializer(data=request.data, partial=True)
+
         serializer.is_valid(raise_exception=True)
-        new_task = serializer.save(last_run=djangotime.now())
+        task_result = serializer.save(last_run=djangotime.now())
+
+        AgentHistory.objects.create(
+            agent=agent,
+            type="task_run",
+            command=task.name,
+            script_results=request.data,
+        )
 
         # check if task is a collector and update the custom field
         if task.custom_field:
-            if not task.stderr:
+            if not task_result.stderr:
 
-                task.save_collector_results()
+                task_result.save_collector_results()
 
                 status = "passing"
             else:
                 status = "failing"
         else:
-            status = "failing" if task.retcode != 0 else "passing"
+            status = "failing" if task_result.retcode != 0 else "passing"
 
-        new_task.status = status
-        new_task.save()
+        if task_result:
+            task_result.status = status
+            task_result.save(update_fields=["status"])
+        else:
+            task_result.status = status
+            task.save(update_fields=["status"])
 
         if status == "passing":
-            if Alert.objects.filter(assigned_task=new_task, resolved=False).exists():
-                Alert.handle_alert_resolve(new_task)
+            if Alert.create_or_return_task_alert(task, agent=agent, skip_create=True):
+                Alert.handle_alert_resolve(task_result)
         else:
-            Alert.handle_alert_failure(new_task)
+            Alert.handle_alert_failure(task_result)
 
         return Response("ok")
 
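The try/except around TaskResult.objects.get is the usual fetch-or-build step before feeding an existing instance into the serializer. Where no serializer is involved, Django's get_or_create can express the same idea more compactly; shown here only as a comparison, not as what the hunk does:

# Comparison only: ORM shortcut for "fetch the row or build a fresh one".
task_result, created = TaskResult.objects.get_or_create(
    task=task,
    agent=agent,
    defaults={"last_run": djangotime.now()},
)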
@@ -414,25 +342,33 @@ class MeshExe(APIView):
     """Sends the mesh exe to the installer"""
 
     def post(self, request):
-        exe = "meshagent.exe" if request.data["arch"] == "64" else "meshagent-x86.exe"
-        mesh_exe = os.path.join(settings.EXE_DIR, exe)
-
-        if not os.path.exists(mesh_exe):
-            return notify_error("Mesh Agent executable not found")
-
-        if settings.DEBUG:
-            with open(mesh_exe, "rb") as f:
-                response = HttpResponse(
-                    f.read(),
-                    content_type="application/vnd.microsoft.portable-executable",
-                )
-                response["Content-Disposition"] = f"inline; filename={exe}"
-                return response
-        else:
-            response = HttpResponse()
-            response["Content-Disposition"] = f"attachment; filename={exe}"
-            response["X-Accel-Redirect"] = f"/private/exe/{exe}"
-            return response
+        match request.data:
+            case {"arch": "64", "plat": "windows"}:
+                arch = MeshAgentIdent.WIN64
+            case {"arch": "32", "plat": "windows"}:
+                arch = MeshAgentIdent.WIN32
+            case _:
+                return notify_error("Arch not specified")
+
+        core = get_core_settings()
+
+        try:
+            uri = get_mesh_ws_url()
+            mesh_id = asyncio.run(get_mesh_device_id(uri, core.mesh_device_group))
+        except:
+            return notify_error("Unable to connect to mesh to get group id information")
+
+        if settings.DOCKER_BUILD:
+            dl_url = f"{settings.MESH_WS_URL.replace('ws://', 'http://')}/meshagents?id={arch}&meshid={mesh_id}&installflags=0"
+        else:
+            dl_url = (
+                f"{core.mesh_site}/meshagents?id={arch}&meshid={mesh_id}&installflags=0"
+            )
+
+        try:
+            return download_mesh_agent(dl_url)
+        except:
+            return notify_error("Unable to download mesh agent exe")
 
 
 class NewAgent(APIView):

@@ -453,11 +389,11 @@ class NewAgent(APIView):
             monitoring_type=request.data["monitoring_type"],
             description=request.data["description"],
             mesh_node_id=request.data["mesh_node_id"],
+            goarch=request.data["goarch"],
+            plat=request.data["plat"],
             last_seen=djangotime.now(),
         )
         agent.save()
-        agent.salt_id = f"{agent.hostname}-{agent.pk}"
-        agent.save(update_fields=["salt_id"])
 
         user = User.objects.create_user(  # type: ignore
             username=request.data["agent_id"],

@@ -485,13 +421,8 @@ class NewAgent(APIView):
             debug_info={"ip": request._client_ip},
         )
 
-        return Response(
-            {
-                "pk": agent.pk,
-                "saltid": f"{agent.hostname}-{agent.pk}",
-                "token": token.key,
-            }
-        )
+        ret = {"pk": agent.pk, "token": token.key}
+        return Response(ret)
 
 
 class Software(APIView):

@@ -500,11 +431,7 @@ class Software(APIView):
 
     def post(self, request):
         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
-        raw: SoftwareList = request.data["software"]
-        if not isinstance(raw, list):
-            return notify_error("err")
-
-        sw = filter_software(raw)
+        sw = request.data["software"]
        if not InstalledSoftware.objects.filter(agent=agent).exists():
             InstalledSoftware(agent=agent, software=sw).save()
         else:

@@ -565,30 +492,6 @@ class ChocoResult(APIView):
         return Response("ok")
 
 
-class AgentRecovery(APIView):
-    authentication_classes = [TokenAuthentication]
-    permission_classes = [IsAuthenticated]
-
-    def get(self, request, agentid):
-        agent = get_object_or_404(Agent, agent_id=agentid)
-        recovery = agent.recoveryactions.filter(last_run=None).last()  # type: ignore
-        ret = {"mode": "pass", "shellcmd": ""}
-        if recovery is None:
-            return Response(ret)
-
-        recovery.last_run = djangotime.now()
-        recovery.save(update_fields=["last_run"])
-
-        ret["mode"] = recovery.mode
-
-        if recovery.mode == "command":
-            ret["shellcmd"] = recovery.command
-        elif recovery.mode == "rpc":
-            reload_nats()
-
-        return Response(ret)
-
-
 class AgentHistoryResult(APIView):
     authentication_classes = [TokenAuthentication]
     permission_classes = [IsAuthenticated]
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.6 on 2021-09-17 19:54
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("automation", "0008_auto_20210302_0415"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="policy",
+            name="created_by",
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+        migrations.AlterField(
+            model_name="policy",
+            name="modified_by",
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+    ]
@@ -1,9 +1,17 @@
-from django.db import models
-
 from agents.models import Agent
-from core.models import CoreSettings
+from clients.models import Client, Site
+from django.db import models
+from django.core.cache import cache
 from logs.models import BaseAuditModel
+
+from typing import Optional, Dict, Any, List, TYPE_CHECKING
+
+from tacticalrmm.constants import CORESETTINGS_CACHE_KEY
+
+if TYPE_CHECKING:
+    from checks.models import Check
+    from autotasks.models import AutomatedTask
 
 
 class Policy(BaseAuditModel):
     name = models.CharField(max_length=255, unique=True)
@@ -27,366 +35,291 @@ class Policy(BaseAuditModel):
         "agents.Agent", related_name="policy_exclusions", blank=True
     )
 
-    def save(self, *args, **kwargs):
+    def save(self, *args: Any, **kwargs: Any) -> None:
         from alerts.tasks import cache_agents_alert_template
-        from automation.tasks import generate_agent_checks_task
 
         # get old policy if exists
-        old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
+        old_policy: Optional[Policy] = (
+            type(self).objects.get(pk=self.pk) if self.pk else None
+        )
         super(Policy, self).save(old_model=old_policy, *args, **kwargs)
 
-        # generate agent checks only if active and enforced were changed
+        # check if alert template was changes and cache on agents
         if old_policy:
-            if old_policy.active != self.active or old_policy.enforced != self.enforced:
-                generate_agent_checks_task.delay(
-                    policy=self.pk,
-                    create_tasks=True,
-                )
-
             if old_policy.alert_template != self.alert_template:
                 cache_agents_alert_template.delay()
+            elif self.alert_template and old_policy.active != self.active:
+                cache_agents_alert_template.delay()
+
+            if old_policy.active != self.active or old_policy.enforced != self.enforced:
+                cache.delete(CORESETTINGS_CACHE_KEY)
+                cache.delete_many_pattern("site_workstation_*")
+                cache.delete_many_pattern("site_server_*")
+                cache.delete_many_pattern("agent_*")
 
     def delete(self, *args, **kwargs):
-        from automation.tasks import generate_agent_checks_task
-
-        agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
-        super(Policy, self).delete(*args, **kwargs)
-
-        generate_agent_checks_task.delay(agents=agents, create_tasks=True)
+        cache.delete(CORESETTINGS_CACHE_KEY)
+        cache.delete_many_pattern("site_workstation_*")
+        cache.delete_many_pattern("site_server_*")
+        cache.delete_many_pattern("agent_*")
+
+        super(Policy, self).delete(
+            *args,
+            **kwargs,
+        )
 
-    def __str__(self):
+    def __str__(self) -> str:
         return self.name
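cache.delete_many_pattern is not part of Django's stock cache API, so the new code implies a cache backend or wrapper that supports pattern deletion (django-redis, for example, exposes delete_pattern). A hedged sketch of what such a helper could look like, purely as an assumption about the supporting code:

# Assumption: a Redis-backed cache; Django's default backends have no pattern delete.
from django.core.cache import cache


def delete_many_pattern(pattern: str) -> None:
    # delete_pattern is provided by django-redis's RedisCache client
    cache.delete_pattern(pattern)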
 
     @property
-    def is_default_server_policy(self):
-        return self.default_server_policy.exists()  # type: ignore
+    def is_default_server_policy(self) -> bool:
+        return self.default_server_policy.exists()
 
     @property
-    def is_default_workstation_policy(self):
-        return self.default_workstation_policy.exists()  # type: ignore
+    def is_default_workstation_policy(self) -> bool:
+        return self.default_workstation_policy.exists()
 
-    def is_agent_excluded(self, agent):
+    def is_agent_excluded(self, agent: "Agent") -> bool:
         return (
             agent in self.excluded_agents.all()
             or agent.site in self.excluded_sites.all()
             or agent.client in self.excluded_clients.all()
         )
 
-    def related_agents(self):
-        return self.get_related("server") | self.get_related("workstation")
+    def related_agents(
+        self, mon_type: Optional[str] = None
+    ) -> "models.QuerySet[Agent]":
+        models.prefetch_related_objects(
+            [self],
+            "excluded_agents",
+            "excluded_sites",
+            "excluded_clients",
+            "workstation_clients",
+            "server_clients",
+            "workstation_sites",
+            "server_sites",
+            "agents",
+        )
+
+        agent_filter = {}
+        filtered_agents_ids = Agent.objects.none()
+
+        if mon_type:
+            agent_filter["monitoring_type"] = mon_type
+
+        excluded_clients_ids = self.excluded_clients.only("pk").values_list(
+            "id", flat=True
+        )
+        excluded_sites_ids = self.excluded_sites.only("pk").values_list("id", flat=True)
+        excluded_agents_ids = self.excluded_agents.only("pk").values_list(
+            "id", flat=True
+        )
+
+        if self.is_default_server_policy:
+            filtered_agents_ids |= (
+                Agent.objects.exclude(block_policy_inheritance=True)
+                .exclude(site__block_policy_inheritance=True)
+                .exclude(site__client__block_policy_inheritance=True)
+                .exclude(id__in=excluded_agents_ids)
+                .exclude(site_id__in=excluded_sites_ids)
+                .exclude(site__client_id__in=excluded_clients_ids)
+                .filter(monitoring_type="server")
+                .only("id")
+                .values_list("id", flat=True)
+            )
+
+        if self.is_default_workstation_policy:
+            filtered_agents_ids |= (
+                Agent.objects.exclude(block_policy_inheritance=True)
+                .exclude(site__block_policy_inheritance=True)
+                .exclude(site__client__block_policy_inheritance=True)
+                .exclude(id__in=excluded_agents_ids)
+                .exclude(site_id__in=excluded_sites_ids)
+                .exclude(site__client_id__in=excluded_clients_ids)
+                .filter(monitoring_type="workstation")
+                .only("id")
+                .values_list("id", flat=True)
+            )
+
+        # if this is the default policy for servers and workstations and skip the other calculations
+        if self.is_default_server_policy and self.is_default_workstation_policy:
+            return Agent.objects.filter(models.Q(id__in=filtered_agents_ids))
 
-    def get_related(self, mon_type):
         explicit_agents = (
-            self.agents.filter(monitoring_type=mon_type)  # type: ignore
-            .exclude(
-                pk__in=self.excluded_agents.only("pk").values_list("pk", flat=True)
-            )
-            .exclude(site__in=self.excluded_sites.all())
-            .exclude(site__client__in=self.excluded_clients.all())
+            self.agents.filter(**agent_filter)  # type: ignore
+            .exclude(id__in=excluded_agents_ids)
+            .exclude(site_id__in=excluded_sites_ids)
+            .exclude(site__client_id__in=excluded_clients_ids)
         )
 
-        explicit_clients = getattr(self, f"{mon_type}_clients").exclude(
-            pk__in=self.excluded_clients.all()
-        )
-        explicit_sites = getattr(self, f"{mon_type}_sites").exclude(
-            pk__in=self.excluded_sites.all()
-        )
+        explicit_clients_qs = Client.objects.none()
+        explicit_sites_qs = Site.objects.none()
+
+        if not mon_type or mon_type == "workstation":
+            explicit_clients_qs |= self.workstation_clients.exclude(  # type: ignore
+                id__in=excluded_clients_ids
+            )
+            explicit_sites_qs |= self.workstation_sites.exclude(  # type: ignore
+                id__in=excluded_sites_ids
+            )
 
-        filtered_agents_pks = Policy.objects.none()
+        if not mon_type or mon_type == "server":
+            explicit_clients_qs |= self.server_clients.exclude(  # type: ignore
+                id__in=excluded_clients_ids
+            )
+            explicit_sites_qs |= self.server_sites.exclude(  # type: ignore
+                id__in=excluded_sites_ids
+            )
 
-        filtered_agents_pks |= (
+        filtered_agents_ids |= (
             Agent.objects.exclude(block_policy_inheritance=True)
             .filter(
-                site__in=[
-                    site
-                    for site in explicit_sites
-                    if site.client not in explicit_clients
-                    and site.client not in self.excluded_clients.all()
+                site_id__in=[
+                    site.id
+                    for site in explicit_sites_qs
+                    if site.client not in explicit_clients_qs
+                    and site.client.id not in excluded_clients_ids
                 ],
-                monitoring_type=mon_type,
+                **agent_filter,
             )
-            .values_list("pk", flat=True)
+            .only("id")
+            .values_list("id", flat=True)
         )
 
-        filtered_agents_pks |= (
+        filtered_agents_ids |= (
             Agent.objects.exclude(block_policy_inheritance=True)
             .exclude(site__block_policy_inheritance=True)
             .filter(
-                site__client__in=[client for client in explicit_clients],
-                monitoring_type=mon_type,
+                site__client__in=explicit_clients_qs,
+                **agent_filter,
             )
-            .values_list("pk", flat=True)
+            .only("id")
+            .values_list("id", flat=True)
         )
 
         return Agent.objects.filter(
-            models.Q(pk__in=filtered_agents_pks)
-            | models.Q(pk__in=explicit_agents.only("pk"))
+            models.Q(id__in=filtered_agents_ids)
+            | models.Q(id__in=explicit_agents.only("id"))
         )
 
     @staticmethod
-    def serialize(policy):
+    def serialize(policy: "Policy") -> Dict[str, Any]:
         # serializes the policy and returns json
from .serializers import PolicyAuditSerializer
|
from .serializers import PolicyAuditSerializer
|
||||||
|
|
||||||
return PolicyAuditSerializer(policy).data
|
return PolicyAuditSerializer(policy).data
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def cascade_policy_tasks(agent):
|
def get_policy_tasks(agent: "Agent") -> "List[AutomatedTask]":
|
||||||
|
|
||||||
# List of all tasks to be applied
|
# List of all tasks to be applied
|
||||||
tasks = list()
|
tasks = list()
|
||||||
added_task_pks = list()
|
|
||||||
|
|
||||||
agent_tasks_parent_pks = [
|
|
||||||
task.parent_task for task in agent.autotasks.filter(managed_by_policy=True)
|
|
||||||
]
|
|
||||||
|
|
||||||
# Get policies applied to agent and agent site and client
|
# Get policies applied to agent and agent site and client
|
||||||
client = agent.client
|
policies = agent.get_agent_policies()
|
||||||
site = agent.site
|
|
||||||
|
|
||||||
default_policy = None
|
processed_policies = list()
|
||||||
client_policy = None
|
|
||||||
site_policy = None
|
|
||||||
agent_policy = agent.policy
|
|
||||||
|
|
||||||
# Get the Client/Site policy based on if the agent is server or workstation
|
for _, policy in policies.items():
|
||||||
if agent.monitoring_type == "server":
|
if policy and policy.active and policy.pk not in processed_policies:
|
||||||
default_policy = CoreSettings.objects.first().server_policy
|
processed_policies.append(policy.pk)
|
||||||
client_policy = client.server_policy
|
for task in policy.autotasks.all():
|
||||||
site_policy = site.server_policy
|
|
||||||
elif agent.monitoring_type == "workstation":
|
|
||||||
default_policy = CoreSettings.objects.first().workstation_policy
|
|
||||||
client_policy = client.workstation_policy
|
|
||||||
site_policy = site.workstation_policy
|
|
||||||
|
|
||||||
# check if client/site/agent is blocking inheritance and blank out policies
|
|
||||||
if agent.block_policy_inheritance:
|
|
||||||
site_policy = None
|
|
||||||
client_policy = None
|
|
||||||
default_policy = None
|
|
||||||
elif site.block_policy_inheritance:
|
|
||||||
client_policy = None
|
|
||||||
default_policy = None
|
|
||||||
elif client.block_policy_inheritance:
|
|
||||||
default_policy = None
|
|
||||||
|
|
||||||
if (
|
|
||||||
agent_policy
|
|
||||||
and agent_policy.active
|
|
||||||
and not agent_policy.is_agent_excluded(agent)
|
|
||||||
):
|
|
||||||
for task in agent_policy.autotasks.all():
|
|
||||||
if task.pk not in added_task_pks:
|
|
||||||
tasks.append(task)
|
tasks.append(task)
|
||||||
added_task_pks.append(task.pk)
|
|
||||||
if (
|
|
||||||
site_policy
|
|
||||||
and site_policy.active
|
|
||||||
and not site_policy.is_agent_excluded(agent)
|
|
||||||
):
|
|
||||||
for task in site_policy.autotasks.all():
|
|
||||||
if task.pk not in added_task_pks:
|
|
||||||
tasks.append(task)
|
|
||||||
added_task_pks.append(task.pk)
|
|
||||||
if (
|
|
||||||
client_policy
|
|
||||||
and client_policy.active
|
|
||||||
and not client_policy.is_agent_excluded(agent)
|
|
||||||
):
|
|
||||||
for task in client_policy.autotasks.all():
|
|
||||||
if task.pk not in added_task_pks:
|
|
||||||
tasks.append(task)
|
|
||||||
added_task_pks.append(task.pk)
|
|
||||||
|
|
||||||
if (
|
return tasks
|
||||||
default_policy
|
|
||||||
and default_policy.active
|
|
||||||
and not default_policy.is_agent_excluded(agent)
|
|
||||||
):
|
|
||||||
for task in default_policy.autotasks.all():
|
|
||||||
if task.pk not in added_task_pks:
|
|
||||||
tasks.append(task)
|
|
||||||
added_task_pks.append(task.pk)
|
|
||||||
|
|
||||||
# remove policy tasks from agent not included in policy
|
|
||||||
for task in agent.autotasks.filter(
|
|
||||||
parent_task__in=[
|
|
||||||
taskpk
|
|
||||||
for taskpk in agent_tasks_parent_pks
|
|
||||||
if taskpk not in added_task_pks
|
|
||||||
]
|
|
||||||
):
|
|
||||||
if task.sync_status == "initial":
|
|
||||||
task.delete()
|
|
||||||
else:
|
|
||||||
task.sync_status = "pendingdeletion"
|
|
||||||
task.save()
|
|
||||||
|
|
||||||
# change tasks from pendingdeletion to notsynced if policy was added or changed
|
|
||||||
agent.autotasks.filter(sync_status="pendingdeletion").filter(
|
|
||||||
parent_task__in=[taskpk for taskpk in added_task_pks]
|
|
||||||
).update(sync_status="notsynced")
|
|
||||||
|
|
||||||
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def cascade_policy_checks(agent):
|
def get_policy_checks(agent: "Agent") -> "List[Check]":
|
||||||
# Get checks added to agent directly
|
|
||||||
agent_checks = list(agent.agentchecks.filter(managed_by_policy=False))
|
|
||||||
|
|
||||||
agent_checks_parent_pks = [
|
# Get checks added to agent directly
|
||||||
check.parent_check
|
agent_checks = list(agent.agentchecks.all())
|
||||||
for check in agent.agentchecks.filter(managed_by_policy=True)
|
|
||||||
]
|
|
||||||
|
|
||||||
# Get policies applied to agent and agent site and client
|
# Get policies applied to agent and agent site and client
|
||||||
client = agent.client
|
policies = agent.get_agent_policies()
|
||||||
site = agent.site
|
|
||||||
|
|
||||||
default_policy = None
|
|
||||||
client_policy = None
|
|
||||||
site_policy = None
|
|
||||||
agent_policy = agent.policy
|
|
||||||
|
|
||||||
if agent.monitoring_type == "server":
|
|
||||||
default_policy = CoreSettings.objects.first().server_policy
|
|
||||||
client_policy = client.server_policy
|
|
||||||
site_policy = site.server_policy
|
|
||||||
elif agent.monitoring_type == "workstation":
|
|
||||||
default_policy = CoreSettings.objects.first().workstation_policy
|
|
||||||
client_policy = client.workstation_policy
|
|
||||||
site_policy = site.workstation_policy
|
|
||||||
|
|
||||||
# check if client/site/agent is blocking inheritance and blank out policies
|
|
||||||
if agent.block_policy_inheritance:
|
|
||||||
site_policy = None
|
|
||||||
client_policy = None
|
|
||||||
default_policy = None
|
|
||||||
elif site.block_policy_inheritance:
|
|
||||||
client_policy = None
|
|
||||||
default_policy = None
|
|
||||||
elif client.block_policy_inheritance:
|
|
||||||
default_policy = None
|
|
||||||
|
|
||||||
# Used to hold the policies that will be applied and the order in which they are applied
|
# Used to hold the policies that will be applied and the order in which they are applied
|
||||||
# Enforced policies are applied first
|
# Enforced policies are applied first
|
||||||
enforced_checks = list()
|
enforced_checks = list()
|
||||||
policy_checks = list()
|
policy_checks = list()
|
||||||
|
|
||||||
if (
|
processed_policies = list()
|
||||||
agent_policy
|
|
||||||
and agent_policy.active
|
for _, policy in policies.items():
|
||||||
and not agent_policy.is_agent_excluded(agent)
|
if policy and policy.active and policy.pk not in processed_policies:
|
||||||
):
|
processed_policies.append(policy.pk)
|
||||||
if agent_policy.enforced:
|
if policy.enforced:
|
||||||
for check in agent_policy.policychecks.all():
|
for check in policy.policychecks.all():
|
||||||
enforced_checks.append(check)
|
enforced_checks.append(check)
|
||||||
else:
|
else:
|
||||||
for check in agent_policy.policychecks.all():
|
for check in policy.policychecks.all():
|
||||||
policy_checks.append(check)
|
policy_checks.append(check)
|
||||||
|
|
||||||
if (
|
if not enforced_checks and not policy_checks:
|
||||||
site_policy
|
return []
|
||||||
and site_policy.active
|
|
||||||
and not site_policy.is_agent_excluded(agent)
|
|
||||||
):
|
|
||||||
if site_policy.enforced:
|
|
||||||
for check in site_policy.policychecks.all():
|
|
||||||
enforced_checks.append(check)
|
|
||||||
else:
|
|
||||||
for check in site_policy.policychecks.all():
|
|
||||||
policy_checks.append(check)
|
|
||||||
|
|
||||||
if (
|
|
||||||
client_policy
|
|
||||||
and client_policy.active
|
|
||||||
and not client_policy.is_agent_excluded(agent)
|
|
||||||
):
|
|
||||||
if client_policy.enforced:
|
|
||||||
for check in client_policy.policychecks.all():
|
|
||||||
enforced_checks.append(check)
|
|
||||||
else:
|
|
||||||
for check in client_policy.policychecks.all():
|
|
||||||
policy_checks.append(check)
|
|
||||||
|
|
||||||
if (
|
|
||||||
default_policy
|
|
||||||
and default_policy.active
|
|
||||||
and not default_policy.is_agent_excluded(agent)
|
|
||||||
):
|
|
||||||
if default_policy.enforced:
|
|
||||||
for check in default_policy.policychecks.all():
|
|
||||||
enforced_checks.append(check)
|
|
||||||
else:
|
|
||||||
for check in default_policy.policychecks.all():
|
|
||||||
policy_checks.append(check)
|
|
||||||
|
|
||||||
# Sorted Checks already added
|
# Sorted Checks already added
|
||||||
added_diskspace_checks = list()
|
added_diskspace_checks: List[str] = list()
|
||||||
added_ping_checks = list()
|
added_ping_checks: List[str] = list()
|
||||||
added_winsvc_checks = list()
|
added_winsvc_checks: List[str] = list()
|
||||||
added_script_checks = list()
|
added_script_checks: List[int] = list()
|
||||||
added_eventlog_checks = list()
|
added_eventlog_checks: List[List[str]] = list()
|
||||||
added_cpuload_checks = list()
|
added_cpuload_checks: List[int] = list()
|
||||||
added_memory_checks = list()
|
added_memory_checks: List[int] = list()
|
||||||
|
|
||||||
# Lists all agent and policy checks that will be created
|
# Lists all agent and policy checks that will be returned
|
||||||
diskspace_checks = list()
|
diskspace_checks: "List[Check]" = list()
|
||||||
ping_checks = list()
|
ping_checks: "List[Check]" = list()
|
||||||
winsvc_checks = list()
|
winsvc_checks: "List[Check]" = list()
|
||||||
script_checks = list()
|
script_checks: "List[Check]" = list()
|
||||||
eventlog_checks = list()
|
eventlog_checks: "List[Check]" = list()
|
||||||
cpuload_checks = list()
|
cpuload_checks: "List[Check]" = list()
|
||||||
memory_checks = list()
|
memory_checks: "List[Check]" = list()
|
||||||
|
|
||||||
|
overridden_checks: List[int] = list()
|
||||||
|
|
||||||
# Loop over checks in with enforced policies first, then non-enforced policies
|
# Loop over checks in with enforced policies first, then non-enforced policies
|
||||||
for check in enforced_checks + agent_checks + policy_checks:
|
for check in enforced_checks + agent_checks + policy_checks:
|
||||||
if check.check_type == "diskspace":
|
if check.check_type == "diskspace" and agent.plat == "windows":
|
||||||
# Check if drive letter was already added
|
# Check if drive letter was already added
|
||||||
if check.disk not in added_diskspace_checks:
|
if check.disk not in added_diskspace_checks:
|
||||||
added_diskspace_checks.append(check.disk)
|
added_diskspace_checks.append(check.disk)
|
||||||
# Dont create the check if it is an agent check
|
# Dont add if check if it is an agent check
|
||||||
if not check.agent:
|
if not check.agent:
|
||||||
diskspace_checks.append(check)
|
diskspace_checks.append(check)
|
||||||
elif check.agent:
|
elif check.agent:
|
||||||
check.overriden_by_policy = True
|
overridden_checks.append(check.pk)
|
||||||
check.save()
|
|
||||||
|
|
||||||
if check.check_type == "ping":
|
elif check.check_type == "ping":
|
||||||
# Check if IP/host was already added
|
# Check if IP/host was already added
|
||||||
if check.ip not in added_ping_checks:
|
if check.ip not in added_ping_checks:
|
||||||
added_ping_checks.append(check.ip)
|
added_ping_checks.append(check.ip)
|
||||||
# Dont create the check if it is an agent check
|
# Dont add if the check if it is an agent check
|
||||||
if not check.agent:
|
if not check.agent:
|
||||||
ping_checks.append(check)
|
ping_checks.append(check)
|
||||||
elif check.agent:
|
elif check.agent:
|
||||||
check.overriden_by_policy = True
|
overridden_checks.append(check.pk)
|
||||||
check.save()
|
|
||||||
|
|
||||||
if check.check_type == "cpuload":
|
elif check.check_type == "cpuload" and agent.plat == "windows":
|
||||||
# Check if cpuload list is empty
|
# Check if cpuload list is empty
|
||||||
if not added_cpuload_checks:
|
if not added_cpuload_checks:
|
||||||
added_cpuload_checks.append(check)
|
added_cpuload_checks.append(check.pk)
|
||||||
# Dont create the check if it is an agent check
|
# Dont create the check if it is an agent check
|
||||||
if not check.agent:
|
if not check.agent:
|
||||||
cpuload_checks.append(check)
|
cpuload_checks.append(check)
|
||||||
elif check.agent:
|
elif check.agent:
|
||||||
check.overriden_by_policy = True
|
overridden_checks.append(check.pk)
|
||||||
check.save()
|
|
||||||
|
|
||||||
if check.check_type == "memory":
|
elif check.check_type == "memory" and agent.plat == "windows":
|
||||||
# Check if memory check list is empty
|
# Check if memory check list is empty
|
||||||
if not added_memory_checks:
|
if not added_memory_checks:
|
||||||
added_memory_checks.append(check)
|
added_memory_checks.append(check.pk)
|
||||||
# Dont create the check if it is an agent check
|
# Dont create the check if it is an agent check
|
||||||
if not check.agent:
|
if not check.agent:
|
||||||
memory_checks.append(check)
|
memory_checks.append(check)
|
||||||
elif check.agent:
|
elif check.agent:
|
||||||
check.overriden_by_policy = True
|
overridden_checks.append(check.pk)
|
||||||
check.save()
|
|
||||||
|
|
||||||
if check.check_type == "winsvc":
|
elif check.check_type == "winsvc" and agent.plat == "windows":
|
||||||
# Check if service name was already added
|
# Check if service name was already added
|
||||||
if check.svc_name not in added_winsvc_checks:
|
if check.svc_name not in added_winsvc_checks:
|
||||||
added_winsvc_checks.append(check.svc_name)
|
added_winsvc_checks.append(check.svc_name)
|
||||||
@@ -394,10 +327,11 @@ class Policy(BaseAuditModel):
|
|||||||
if not check.agent:
|
if not check.agent:
|
||||||
winsvc_checks.append(check)
|
winsvc_checks.append(check)
|
||||||
elif check.agent:
|
elif check.agent:
|
||||||
check.overriden_by_policy = True
|
overridden_checks.append(check.pk)
|
||||||
check.save()
|
|
||||||
|
|
||||||
if check.check_type == "script":
|
elif check.check_type == "script" and agent.is_supported_script(
|
||||||
|
check.script.supported_platforms
|
||||||
|
):
|
||||||
# Check if script id was already added
|
# Check if script id was already added
|
||||||
if check.script.id not in added_script_checks:
|
if check.script.id not in added_script_checks:
|
||||||
added_script_checks.append(check.script.id)
|
added_script_checks.append(check.script.id)
|
||||||
@@ -405,20 +339,25 @@ class Policy(BaseAuditModel):
|
|||||||
if not check.agent:
|
if not check.agent:
|
||||||
script_checks.append(check)
|
script_checks.append(check)
|
||||||
elif check.agent:
|
elif check.agent:
|
||||||
check.overriden_by_policy = True
|
overridden_checks.append(check.pk)
|
||||||
check.save()
|
|
||||||
|
|
||||||
if check.check_type == "eventlog":
|
elif check.check_type == "eventlog" and agent.plat == "windows":
|
||||||
# Check if events were already added
|
# Check if events were already added
|
||||||
if [check.log_name, check.event_id] not in added_eventlog_checks:
|
if [check.log_name, check.event_id] not in added_eventlog_checks:
|
||||||
added_eventlog_checks.append([check.log_name, check.event_id])
|
added_eventlog_checks.append([check.log_name, check.event_id])
|
||||||
if not check.agent:
|
if not check.agent:
|
||||||
eventlog_checks.append(check)
|
eventlog_checks.append(check)
|
||||||
elif check.agent:
|
elif check.agent:
|
||||||
check.overriden_by_policy = True
|
overridden_checks.append(check.pk)
|
||||||
check.save()
|
|
||||||
|
|
||||||
final_list = (
|
if overridden_checks:
|
||||||
|
from checks.models import Check
|
||||||
|
|
||||||
|
Check.objects.filter(pk__in=overridden_checks).update(
|
||||||
|
overridden_by_policy=True
|
||||||
|
)
|
||||||
|
|
||||||
|
return (
|
||||||
diskspace_checks
|
diskspace_checks
|
||||||
+ ping_checks
|
+ ping_checks
|
||||||
+ cpuload_checks
|
+ cpuload_checks
|
||||||
@@ -427,33 +366,3 @@ class Policy(BaseAuditModel):
|
|||||||
+ script_checks
|
+ script_checks
|
||||||
+ eventlog_checks
|
+ eventlog_checks
|
||||||
)
|
)
|
||||||
|
|
||||||
# remove policy checks from agent that fell out of policy scope
|
|
||||||
agent.agentchecks.filter(
|
|
||||||
managed_by_policy=True,
|
|
||||||
parent_check__in=[
|
|
||||||
checkpk
|
|
||||||
for checkpk in agent_checks_parent_pks
|
|
||||||
if checkpk not in [check.pk for check in final_list]
|
|
||||||
],
|
|
||||||
).delete()
|
|
||||||
|
|
||||||
return [
|
|
||||||
check for check in final_list if check.pk not in agent_checks_parent_pks
|
|
||||||
]
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def generate_policy_checks(agent):
|
|
||||||
checks = Policy.cascade_policy_checks(agent)
|
|
||||||
|
|
||||||
if checks:
|
|
||||||
for check in checks:
|
|
||||||
check.create_policy_check(agent)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def generate_policy_tasks(agent):
|
|
||||||
tasks = Policy.cascade_policy_tasks(agent)
|
|
||||||
|
|
||||||
if tasks:
|
|
||||||
for task in tasks:
|
|
||||||
task.create_policy_task(agent)
|
|
||||||
|
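The refactored check resolution above merges enforced policy checks, agent checks, and non-enforced policy checks in that order, keeps only the first check seen per key (drive letter, IP, service name, script id, event-log pair), and records agent checks that lose as overridden. A minimal standalone sketch of that selection rule, using a hypothetical `FakeCheck` class rather than the project's models:

```python
# Illustrative only: first-seen-wins merge keyed by drive letter, with losing
# agent checks collected as "overridden". FakeCheck stands in for the real model.
from dataclasses import dataclass
from typing import List, Optional, Tuple


@dataclass
class FakeCheck:
    pk: int
    check_type: str
    disk: Optional[str] = None
    agent: Optional[str] = None  # non-None means the check belongs to an agent


def merge_diskspace_checks(
    enforced: List[FakeCheck], agent_checks: List[FakeCheck], policy: List[FakeCheck]
) -> Tuple[List[FakeCheck], List[int]]:
    added_disks: List[str] = []
    returned: List[FakeCheck] = []
    overridden_pks: List[int] = []

    # enforced policies first, then agent checks, then non-enforced policies
    for check in enforced + agent_checks + policy:
        if check.check_type != "diskspace":
            continue
        if check.disk not in added_disks:
            added_disks.append(check.disk)
            if not check.agent:        # policy check wins the slot
                returned.append(check)
        elif check.agent:              # duplicate agent check is overridden
            overridden_pks.append(check.pk)
    return returned, overridden_pks


if __name__ == "__main__":
    enforced = [FakeCheck(1, "diskspace", disk="C:")]
    agent = [FakeCheck(2, "diskspace", disk="C:", agent="host1")]
    policy = [FakeCheck(3, "diskspace", disk="D:")]
    print(merge_diskspace_checks(enforced, agent, policy))
    # ([FakeCheck(pk=1, ... disk='C:'), FakeCheck(pk=3, ... disk='D:')], [2])
```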
@@ -4,8 +4,8 @@ from tacticalrmm.permissions import _has_perm
 
 
 class AutomationPolicyPerms(permissions.BasePermission):
-    def has_permission(self, r, view):
+    def has_permission(self, r, view) -> bool:
         if r.method == "GET":
-            return True
-
+            return _has_perm(r, "can_list_automation_policies")
+        else:
             return _has_perm(r, "can_manage_automation_policies")
@@ -1,14 +1,13 @@
+from agents.serializers import AgentHostnameSerializer
+from autotasks.models import TaskResult
+from checks.models import CheckResult
+from clients.models import Client
+from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
 from rest_framework.serializers import (
     ModelSerializer,
     ReadOnlyField,
     SerializerMethodField,
 )
 
-from agents.serializers import AgentHostnameSerializer
-from autotasks.models import AutomatedTask
-from checks.models import Check
-from clients.models import Client
-from clients.serializers import ClientSerializer, SiteSerializer
 from winupdate.serializers import WinUpdatePolicySerializer
 
 from .models import Policy

@@ -21,25 +20,70 @@ class PolicySerializer(ModelSerializer):
 
 
 class PolicyTableSerializer(ModelSerializer):
 
     default_server_policy = ReadOnlyField(source="is_default_server_policy")
     default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
     agents_count = SerializerMethodField(read_only=True)
     winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
     alert_template = ReadOnlyField(source="alert_template.id")
-    excluded_clients = ClientSerializer(many=True)
-    excluded_sites = SiteSerializer(many=True)
-    excluded_agents = AgentHostnameSerializer(many=True)
 
     class Meta:
         model = Policy
         fields = "__all__"
-        depth = 1
 
     def get_agents_count(self, policy):
         return policy.related_agents().count()
 
 
+class PolicyRelatedSerializer(ModelSerializer):
+    workstation_clients = SerializerMethodField()
+    server_clients = SerializerMethodField()
+    workstation_sites = SerializerMethodField()
+    server_sites = SerializerMethodField()
+    agents = SerializerMethodField()
+
+    def get_agents(self, policy):
+        return AgentHostnameSerializer(
+            policy.agents.filter_by_role(self.context["user"]).only(
+                "agent_id", "hostname"
+            ),
+            many=True,
+        ).data
+
+    def get_workstation_clients(self, policy):
+        return ClientMinimumSerializer(
+            policy.workstation_clients.filter_by_role(self.context["user"]), many=True
+        ).data
+
+    def get_server_clients(self, policy):
+        return ClientMinimumSerializer(
+            policy.server_clients.filter_by_role(self.context["user"]), many=True
+        ).data
+
+    def get_workstation_sites(self, policy):
+        return SiteMinimumSerializer(
+            policy.workstation_sites.filter_by_role(self.context["user"]), many=True
+        ).data
+
+    def get_server_sites(self, policy):
+        return SiteMinimumSerializer(
+            policy.server_sites.filter_by_role(self.context["user"]), many=True
+        ).data
+
+    class Meta:
+        model = Policy
+        fields = (
+            "pk",
+            "name",
+            "workstation_clients",
+            "workstation_sites",
+            "server_clients",
+            "server_sites",
+            "agents",
+            "is_default_server_policy",
+            "is_default_workstation_policy",
+        )
 
 
 class PolicyOverviewSerializer(ModelSerializer):
     class Meta:
         model = Client

@@ -48,49 +92,21 @@ class PolicyOverviewSerializer(ModelSerializer):
 
 
 class PolicyCheckStatusSerializer(ModelSerializer):
 
     hostname = ReadOnlyField(source="agent.hostname")
 
     class Meta:
-        model = Check
+        model = CheckResult
         fields = "__all__"
 
 
 class PolicyTaskStatusSerializer(ModelSerializer):
 
     hostname = ReadOnlyField(source="agent.hostname")
 
     class Meta:
-        model = AutomatedTask
+        model = TaskResult
         fields = "__all__"
 
 
-class PolicyCheckSerializer(ModelSerializer):
-    class Meta:
-        model = Check
-        fields = (
-            "id",
-            "check_type",
-            "readable_desc",
-            "assignedtask",
-            "text_alert",
-            "email_alert",
-            "dashboard_alert",
-        )
-        depth = 1
-
-
-class AutoTasksFieldSerializer(ModelSerializer):
-    assigned_check = PolicyCheckSerializer(read_only=True)
-    script = ReadOnlyField(source="script.id")
-    custom_field = ReadOnlyField(source="custom_field.id")
-
-    class Meta:
-        model = AutomatedTask
-        fields = "__all__"
-        depth = 1
-
-
 class PolicyAuditSerializer(ModelSerializer):
     class Meta:
         model = Policy
@@ -1,153 +1,20 @@
-from typing import Any, Dict, List, Union
-
 from tacticalrmm.celery import app
 
 
-@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
-def generate_agent_checks_task(
-    policy: int = None,
-    site: int = None,
-    client: int = None,
-    agents: List[int] = list(),
-    all: bool = False,
-    create_tasks: bool = False,
-) -> Union[str, None]:
-    from agents.models import Agent
-    from automation.models import Policy
-
-    p = Policy.objects.get(pk=policy) if policy else None
-
-    # generate checks on all agents if all is specified or if policy is default server/workstation policy
-    if (p and p.is_default_server_policy and p.is_default_workstation_policy) or all:
-        a = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
-
-    # generate checks on all servers if policy is a default servers policy
-    elif p and p.is_default_server_policy:
-        a = Agent.objects.filter(monitoring_type="server").only("pk", "monitoring_type")
-
-    # generate checks on all workstations if policy is a default workstations policy
-    elif p and p.is_default_workstation_policy:
-        a = Agent.objects.filter(monitoring_type="workstation").only(
-            "pk", "monitoring_type"
-        )
-
-    # generate checks on a list of supplied agents
-    elif agents:
-        a = Agent.objects.filter(pk__in=agents)
-
-    # generate checks on agents affected by supplied policy
-    elif policy:
-        a = p.related_agents().only("pk")
-
-    # generate checks that has specified site
-    elif site:
-        a = Agent.objects.filter(site_id=site)
-
-    # generate checks that has specified client
-    elif client:
-        a = Agent.objects.filter(site__client_id=client)
-    else:
-        a = []
-
-    for agent in a:
-        agent.generate_checks_from_policies()
-        if create_tasks:
-            agent.generate_tasks_from_policies()
-
-    return "ok"
-
-
-@app.task(
-    acks_late=True, retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5}
-)
-# updates policy managed check fields on agents
-def update_policy_check_fields_task(check: int) -> str:
-    from checks.models import Check
-
-    c: Check = Check.objects.get(pk=check)
-    update_fields: Dict[Any, Any] = {}
-
-    for field in c.policy_fields_to_copy:
-        update_fields[field] = getattr(c, field)
-
-    Check.objects.filter(parent_check=check).update(**update_fields)
-
-    return "ok"
-
-
-@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
-# generates policy tasks on agents affected by a policy
-def generate_agent_autotasks_task(policy: int = None) -> str:
-    from agents.models import Agent
-    from automation.models import Policy
-
-    p: Policy = Policy.objects.get(pk=policy)
-
-    if p and p.is_default_server_policy and p.is_default_workstation_policy:
-        agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
-    elif p and p.is_default_server_policy:
-        agents = Agent.objects.filter(monitoring_type="server").only(
-            "pk", "monitoring_type"
-        )
-    elif p and p.is_default_workstation_policy:
-        agents = Agent.objects.filter(monitoring_type="workstation").only(
-            "pk", "monitoring_type"
-        )
-    else:
-        agents = p.related_agents().only("pk")
-
-    for agent in agents:
-        agent.generate_tasks_from_policies()
-
-    return "ok"
-
-
-@app.task(
-    acks_late=True,
-    retry_backoff=5,
-    retry_jitter=True,
-    retry_kwargs={"max_retries": 5},
-)
-def delete_policy_autotasks_task(task: int) -> str:
-    from autotasks.models import AutomatedTask
-
-    for t in AutomatedTask.objects.filter(parent_task=task):
-        t.delete_task_on_agent()
-
-    return "ok"
-
-
 @app.task
 def run_win_policy_autotasks_task(task: int) -> str:
     from autotasks.models import AutomatedTask
 
-    for t in AutomatedTask.objects.filter(parent_task=task):
-        t.run_win_task()
-
-    return "ok"
-
-
-@app.task(
-    acks_late=True,
-    retry_backoff=5,
-    retry_jitter=True,
-    retry_kwargs={"max_retries": 5},
-)
-def update_policy_autotasks_fields_task(task: int, update_agent: bool = False) -> str:
-    from autotasks.models import AutomatedTask
-
-    t = AutomatedTask.objects.get(pk=task)
-    update_fields: Dict[str, Any] = {}
-
-    for field in t.policy_fields_to_copy:
-        update_fields[field] = getattr(t, field)
-
-    AutomatedTask.objects.filter(parent_task=task).update(**update_fields)
-
-    if update_agent:
-        for t in AutomatedTask.objects.filter(parent_task=task).exclude(
-            sync_status="initial"
-        ):
-            t.modify_task_on_agent()
-
+    try:
+        policy_task = AutomatedTask.objects.get(pk=task)
+    except AutomatedTask.DoesNotExist:
+        return "AutomatedTask not found"
+
+    if not policy_task.policy:
+        return "AutomatedTask must be a policy"
+
+    # get related agents from policy
+    for agent in policy_task.policy.related_agents():
+        policy_task.run_win_task(agent)
+
     return "ok"
(File diff suppressed because it is too large.)

@@ -1,3 +1,5 @@
+from autotasks.views import GetAddAutoTasks
+from checks.views import GetAddChecks
 from django.urls import path
 
 from . import views

@@ -7,13 +9,14 @@ urlpatterns = [
     path("policies/<int:pk>/related/", views.GetRelated.as_view()),
     path("policies/overview/", views.OverviewPolicy.as_view()),
     path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
-    path("sync/", views.PolicySync.as_view()),
-    path("<int:pk>/policychecks/", views.PolicyCheck.as_view()),
-    path("<int:pk>/policyautomatedtasks/", views.PolicyAutoTask.as_view()),
-    path("policycheckstatus/<int:check>/check/", views.PolicyCheck.as_view()),
-    path("policyautomatedtaskstatus/<int:task>/task/", views.PolicyAutoTask.as_view()),
-    path("runwintask/<int:task>/", views.PolicyAutoTask.as_view()),
-    path("winupdatepolicy/", views.UpdatePatchPolicy.as_view()),
-    path("winupdatepolicy/<int:patchpolicy>/", views.UpdatePatchPolicy.as_view()),
-    path("winupdatepolicy/reset/", views.UpdatePatchPolicy.as_view()),
+    # alias to get policy checks
+    path("policies/<int:policy>/checks/", GetAddChecks.as_view()),
+    # alias to get policy tasks
+    path("policies/<int:policy>/tasks/", GetAddAutoTasks.as_view()),
+    path("checks/<int:check>/status/", views.PolicyCheck.as_view()),
+    path("tasks/<int:task>/status/", views.PolicyAutoTask.as_view()),
+    path("tasks/<int:task>/run/", views.PolicyAutoTask.as_view()),
+    path("patchpolicy/", views.UpdatePatchPolicy.as_view()),
+    path("patchpolicy/<int:pk>/", views.UpdatePatchPolicy.as_view()),
+    path("patchpolicy/reset/", views.ResetPatchPolicy.as_view()),
 ]
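A hedged sketch of exercising the renamed routes over plain HTTP with the third-party requests library. The base URL, API token, numeric ids, and the assumption that these routes are mounted under an /automation/ prefix are all placeholders, not confirmed by the diff:

```python
import requests

BASE = "https://api.example.com/automation"        # placeholder host and prefix
HEADERS = {"Authorization": "Token <API_KEY>"}      # placeholder credential

# checks attached to policy 1 (now served by the checks app's GetAddChecks view)
requests.get(f"{BASE}/policies/1/checks/", headers=HEADERS)

# status results for policy task 5, and a bulk run of that task on related agents
requests.get(f"{BASE}/tasks/5/status/", headers=HEADERS)
requests.post(f"{BASE}/tasks/5/run/", headers=HEADERS)

# bulk reset of agent patch policies, optionally scoped to a client
requests.post(f"{BASE}/patchpolicy/reset/", json={"client": 1}, headers=HEADERS)
```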
@@ -1,24 +1,23 @@
 from agents.models import Agent
-from agents.serializers import AgentHostnameSerializer
-from autotasks.models import AutomatedTask
-from checks.models import Check
+from autotasks.models import TaskResult
+from checks.models import CheckResult
 from clients.models import Client
-from clients.serializers import ClientSerializer, SiteSerializer
 from django.shortcuts import get_object_or_404
+from rest_framework.exceptions import PermissionDenied
 from rest_framework.permissions import IsAuthenticated
 from rest_framework.response import Response
 from rest_framework.views import APIView
-from tacticalrmm.utils import notify_error
 from winupdate.models import WinUpdatePolicy
 from winupdate.serializers import WinUpdatePolicySerializer
 
+from tacticalrmm.permissions import _has_perm_on_client, _has_perm_on_site
+
 from .models import Policy
 from .permissions import AutomationPolicyPerms
 from .serializers import (
-    AutoTasksFieldSerializer,
-    PolicyCheckSerializer,
     PolicyCheckStatusSerializer,
     PolicyOverviewSerializer,
+    PolicyRelatedSerializer,
     PolicySerializer,
     PolicyTableSerializer,
     PolicyTaskStatusSerializer,

@@ -31,7 +30,11 @@ class GetAddPolicies(APIView):
     def get(self, request):
         policies = Policy.objects.all()
 
-        return Response(PolicyTableSerializer(policies, many=True).data)
+        return Response(
+            PolicyTableSerializer(
+                policies, context={"user": request.user}, many=True
+            ).data
+        )
 
     def post(self, request):
         serializer = PolicySerializer(data=request.data, partial=True)

@@ -47,8 +50,8 @@ class GetAddPolicies(APIView):
                 check.create_policy_check(policy=policy)
 
             tasks = copyPolicy.autotasks.all()
 
             for task in tasks:
-                task.create_policy_task(policy=policy)
+                if not task.assigned_check:
+                    task.create_policy_task(policy=policy)
 
         return Response("ok")

@@ -63,22 +66,12 @@ class GetUpdateDeletePolicy(APIView):
         return Response(PolicySerializer(policy).data)
 
     def put(self, request, pk):
-        from .tasks import generate_agent_checks_task
-
         policy = get_object_or_404(Policy, pk=pk)
 
         serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
         serializer.save()
 
-        # check for excluding objects and in the request and if present generate policies
-        if (
-            "excluded_sites" in request.data.keys()
-            or "excluded_clients" in request.data.keys()
-            or "excluded_agents" in request.data.keys()
-        ):
-            generate_agent_checks_task.delay(policy=pk, create_tasks=True)
-
         return Response("ok")
 
     def delete(self, request, pk):

@@ -87,34 +80,15 @@ class GetUpdateDeletePolicy(APIView):
         return Response("ok")
 
 
-class PolicySync(APIView):
-    def post(self, request):
-        if "policy" in request.data.keys():
-            from automation.tasks import generate_agent_checks_task
-
-            generate_agent_checks_task.delay(
-                policy=request.data["policy"], create_tasks=True
-            )
-            return Response("ok")
-
-        else:
-            return notify_error("The request was invalid")
-
-
 class PolicyAutoTask(APIView):
-    permission_classes = [IsAuthenticated, AutomationPolicyPerms]
-    # tasks associated with policy
-    def get(self, request, pk):
-        tasks = AutomatedTask.objects.filter(policy=pk)
-        return Response(AutoTasksFieldSerializer(tasks, many=True).data)
 
     # get status of all tasks
-    def patch(self, request, task):
-        tasks = AutomatedTask.objects.filter(parent_task=task)
+    def get(self, request, task):
+        tasks = TaskResult.objects.filter(task=task)
         return Response(PolicyTaskStatusSerializer(tasks, many=True).data)
 
     # bulk run win tasks associated with policy
-    def put(self, request, task):
+    def post(self, request, task):
         from .tasks import run_win_policy_autotasks_task
 
         run_win_policy_autotasks_task.delay(task=task)

@@ -124,12 +98,8 @@ class PolicyAutoTask(APIView):
 class PolicyCheck(APIView):
     permission_classes = [IsAuthenticated, AutomationPolicyPerms]
 
-    def get(self, request, pk):
-        checks = Check.objects.filter(policy__pk=pk, agent=None)
-        return Response(PolicyCheckSerializer(checks, many=True).data)
-
-    def patch(self, request, check):
-        checks = Check.objects.filter(parent_check=check)
+    def get(self, request, check):
+        checks = CheckResult.objects.filter(assigned_check=check)
         return Response(PolicyCheckStatusSerializer(checks, many=True).data)
 
 

@@ -143,8 +113,6 @@ class OverviewPolicy(APIView):
 class GetRelated(APIView):
     def get(self, request, pk):
 
-        response = {}
-
         policy = (
             Policy.objects.filter(pk=pk)
             .prefetch_related(

@@ -156,43 +124,9 @@ class GetRelated(APIView):
             .first()
         )
 
-        response["default_server_policy"] = policy.is_default_server_policy
-        response["default_workstation_policy"] = policy.is_default_workstation_policy
-
-        response["server_clients"] = ClientSerializer(
-            policy.server_clients.all(), many=True
-        ).data
-        response["workstation_clients"] = ClientSerializer(
-            policy.workstation_clients.all(), many=True
-        ).data
-
-        filtered_server_sites = list()
-        filtered_workstation_sites = list()
-
-        for client in policy.server_clients.all():
-            for site in client.sites.all():
-                if site not in policy.server_sites.all():
-                    filtered_server_sites.append(site)
-
-        response["server_sites"] = SiteSerializer(
-            filtered_server_sites + list(policy.server_sites.all()), many=True
-        ).data
-
-        for client in policy.workstation_clients.all():
-            for site in client.sites.all():
-                if site not in policy.workstation_sites.all():
-                    filtered_workstation_sites.append(site)
-
-        response["workstation_sites"] = SiteSerializer(
-            filtered_workstation_sites + list(policy.workstation_sites.all()), many=True
-        ).data
-
-        response["agents"] = AgentHostnameSerializer(
-            policy.related_agents().only("pk", "hostname"),
-            many=True,
-        ).data
-
-        return Response(response)
+        return Response(
+            PolicyRelatedSerializer(policy, context={"user": request.user}).data
+        )
 
 
 class UpdatePatchPolicy(APIView):

@@ -203,14 +137,14 @@ class UpdatePatchPolicy(APIView):
 
         serializer = WinUpdatePolicySerializer(data=request.data, partial=True)
         serializer.is_valid(raise_exception=True)
-        serializer.policy = policy  # type: ignore
+        serializer.policy = policy
         serializer.save()
 
         return Response("ok")
 
     # update patch policy
-    def put(self, request, patchpolicy):
-        policy = get_object_or_404(WinUpdatePolicy, pk=patchpolicy)
+    def put(self, request, pk):
+        policy = get_object_or_404(WinUpdatePolicy, pk=pk)
 
         serializer = WinUpdatePolicySerializer(
             instance=policy, data=request.data, partial=True

@@ -220,20 +154,41 @@ class UpdatePatchPolicy(APIView):
 
         return Response("ok")
 
-    # bulk reset agent patch policy
-    def patch(self, request):
+    # delete patch policy
+    def delete(self, request, pk):
+        get_object_or_404(WinUpdatePolicy, pk=pk).delete()
+
+        return Response("ok")
+
+
+class ResetPatchPolicy(APIView):
+    # bulk reset agent patch policy
+    def post(self, request):
 
-        agents = None
         if "client" in request.data:
-            agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
-                site__client_id=request.data["client"]
+            if not _has_perm_on_client(request.user, request.data["client"]):
+                raise PermissionDenied()
+
+            agents = (
+                Agent.objects.filter_by_role(request.user)  # type: ignore
+                .prefetch_related("winupdatepolicy")
+                .filter(site__client_id=request.data["client"])
             )
         elif "site" in request.data:
-            agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
-                site_id=request.data["site"]
+            if not _has_perm_on_site(request.user, request.data["site"]):
+                raise PermissionDenied()
+
+            agents = (
+                Agent.objects.filter_by_role(request.user)  # type: ignore
+                .prefetch_related("winupdatepolicy")
+                .filter(site_id=request.data["site"])
             )
         else:
-            agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")
+            agents = (
+                Agent.objects.filter_by_role(request.user)  # type: ignore
+                .prefetch_related("winupdatepolicy")
+                .only("pk")
+            )
 
         for agent in agents:
             winupdatepolicy = agent.winupdatepolicy.get()

@@ -258,10 +213,4 @@ class UpdatePatchPolicy(APIView):
             ]
         )
 
-        return Response("ok")
-
-    # delete patch policy
-    def delete(self, request, patchpolicy):
-        get_object_or_404(WinUpdatePolicy, pk=patchpolicy).delete()
-
-        return Response("ok")
+        return Response("The patch policy on the affected agents has been reset.")
@@ -1,5 +1,6 @@
 from django.contrib import admin
 
-from .models import AutomatedTask
+from .models import AutomatedTask, TaskResult
 
 admin.site.register(AutomatedTask)
+admin.site.register(TaskResult)
api/tacticalrmm/autotasks/baker_recipes.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+from model_bakery.recipe import Recipe
+
+task = Recipe(
+    "autotasks.AutomatedTask",
+)
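A hedged sketch of consuming the new baker recipe from a test, assuming model_bakery is installed and Django test settings are active (the test module itself is illustrative):

```python
# Inside a Django TestCase method, the recipe is looked up as "<app>.<recipe name>".
from model_bakery import baker

task = baker.make_recipe("autotasks.task")                 # one AutomatedTask
tasks = baker.make_recipe("autotasks.task", _quantity=3)   # a list of three
```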
@@ -1,17 +1,12 @@
-from django.core.management.base import BaseCommand
-
-from agents.models import Agent
 from autotasks.tasks import remove_orphaned_win_tasks
+from django.core.management.base import BaseCommand
 
 
 class Command(BaseCommand):
     help = "Checks for orphaned tasks on all agents and removes them"
 
     def handle(self, *args, **kwargs):
-        agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time")
-        online = [i for i in agents if i.status == "online"]
-        for agent in online:
-            remove_orphaned_win_tasks.delay(agent.pk)
+        remove_orphaned_win_tasks.s()
 
         self.stdout.write(
             self.style.SUCCESS(
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.6 on 2021-09-17 19:54
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("autotasks", "0022_automatedtask_collector_all_output"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="automatedtask",
+            name="created_by",
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+        migrations.AlterField(
+            model_name="automatedtask",
+            name="modified_by",
+            field=models.CharField(blank=True, max_length=255, null=True),
+        ),
+    ]
@@ -0,0 +1,87 @@
+# Generated by Django 3.2.9 on 2021-12-14 00:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0023_auto_20210917_1954'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='automatedtask',
+            name='run_time_days',
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='actions',
+            field=models.JSONField(default=dict),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='daily_interval',
+            field=models.PositiveSmallIntegerField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='expire_date',
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='monthly_days_of_month',
+            field=models.PositiveIntegerField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='monthly_months_of_year',
+            field=models.PositiveIntegerField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='monthly_weeks_of_month',
+            field=models.PositiveSmallIntegerField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='random_task_delay',
+            field=models.CharField(blank=True, max_length=10, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='stop_task_at_duration_end',
+            field=models.BooleanField(blank=True, default=False),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='task_instance_policy',
+            field=models.PositiveSmallIntegerField(blank=True, default=1),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='task_repetition_duration',
+            field=models.CharField(blank=True, max_length=10, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='task_repetition_interval',
+            field=models.CharField(blank=True, max_length=10, null=True),
+        ),
+        migrations.AddField(
+            model_name='automatedtask',
+            name='weekly_interval',
+            field=models.PositiveSmallIntegerField(blank=True, null=True),
+        ),
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='task_type',
+            field=models.CharField(choices=[('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('monthlydow', 'Monthly Day of Week'), ('checkfailure', 'On Check Failure'), ('manual', 'Manual'), ('runonce', 'Run Once')], default='manual', max_length=100),
+        ),
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='timeout',
+            field=models.PositiveIntegerField(blank=True, default=120),
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.10 on 2021-12-29 14:57
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0024_auto_20211214_0040'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='automatedtask',
+            name='continue_on_error',
+            field=models.BooleanField(default=True),
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.10 on 2021-12-30 14:46
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0025_automatedtask_continue_on_error'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='monthly_days_of_month',
+            field=models.PositiveBigIntegerField(blank=True, null=True),
+        ),
+    ]
@@ -0,0 +1,24 @@
+# Generated by Django 3.2.11 on 2022-01-07 06:43
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0026_alter_automatedtask_monthly_days_of_month'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='daily_interval',
+            field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(255)]),
+        ),
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='weekly_interval',
+            field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(52)]),
+        ),
+    ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.11 on 2022-01-09 21:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('autotasks', '0027_auto_20220107_0643'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='automatedtask',
+            name='actions',
+            field=models.JSONField(default=list),
+        ),
+    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.10 on 2022-01-10 01:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('autotasks', '0028_alter_automatedtask_actions'),
    ]

    operations = [
        migrations.AlterField(
            model_name='automatedtask',
            name='task_type',
            field=models.CharField(choices=[('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('monthlydow', 'Monthly Day of Week'), ('checkfailure', 'On Check Failure'), ('manual', 'Manual'), ('runonce', 'Run Once'), ('scheduled', 'Scheduled')], default='manual', max_length=100),
        ),
    ]
@@ -0,0 +1,99 @@
# Generated by Django 3.2.12 on 2022-04-01 22:44

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("checks", "0025_auto_20210917_1954"),
        ("agents", "0046_alter_agenthistory_command"),
        ("autotasks", "0029_alter_automatedtask_task_type"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="automatedtask",
            name="retvalue",
        ),
        migrations.AlterField(
            model_name="automatedtask",
            name="assigned_check",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="assignedtasks",
                to="checks.check",
            ),
        ),
        migrations.AlterField(
            model_name="automatedtask",
            name="win_task_name",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.CreateModel(
            name="TaskResult",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("retcode", models.IntegerField(blank=True, null=True)),
                ("stdout", models.TextField(blank=True, null=True)),
                ("stderr", models.TextField(blank=True, null=True)),
                ("execution_time", models.CharField(default="0.0000", max_length=100)),
                ("last_run", models.DateTimeField(blank=True, null=True)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("passing", "Passing"),
                            ("failing", "Failing"),
                            ("pending", "Pending"),
                        ],
                        default="pending",
                        max_length=30,
                    ),
                ),
                (
                    "sync_status",
                    models.CharField(
                        choices=[
                            ("synced", "Synced With Agent"),
                            ("notsynced", "Waiting On Agent Checkin"),
                            ("pendingdeletion", "Pending Deletion on Agent"),
                            ("initial", "Initial Task Sync"),
                        ],
                        default="initial",
                        max_length=100,
                    ),
                ),
                (
                    "agent",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="taskresults",
                        to="agents.agent",
                    ),
                ),
                (
                    "task",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="taskresults",
                        to="autotasks.automatedtask",
                    ),
                ),
            ],
            options={
                "unique_together": {("agent", "task")},
            },
        ),
    ]
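Aside: the new TaskResult table moves per-agent run data off AutomatedTask, and unique_together ("agent", "task") guarantees at most one result row per agent/task pair. A minimal sketch of how a result could be looked up under that schema, assuming the models are importable from autotasks.models (an import path not shown in this diff):

from autotasks.models import TaskResult  # assumed import path, for illustration only


def latest_result(task, agent):
    # unique_together ("agent", "task") means at most one row can exist per pair
    return TaskResult.objects.filter(agent=agent, task=task).first()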
@@ -0,0 +1,50 @@
# Generated by Django 3.2.12 on 2022-04-01 22:49

from django.db import migrations, transaction
from django.db.utils import IntegrityError


def migrate_task_results(apps, schema_editor):
    AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
    TaskResult = apps.get_model("autotasks", "TaskResult")
    for task in AutomatedTask.objects.exclude(agent=None):

        try:
            with transaction.atomic():
                if task.managed_by_policy:
                    TaskResult.objects.create(
                        task_id=task.parent_task,
                        agent_id=task.agent_id,
                        retcode=task.retcode,
                        stdout=task.stdout,
                        stderr=task.stderr,
                        execution_time=task.execution_time,
                        last_run=task.last_run,
                        status=task.status,
                        sync_status=task.sync_status,
                    )
                else:
                    TaskResult.objects.create(
                        task_id=task.id,
                        agent_id=task.agent.id,
                        retcode=task.retcode,
                        stdout=task.stdout,
                        stderr=task.stderr,
                        execution_time=task.execution_time,
                        last_run=task.last_run,
                        status=task.status,
                        sync_status=task.sync_status,
                    )
        except IntegrityError:
            continue


class Migration(migrations.Migration):
    atomic = False
    dependencies = [
        ("autotasks", "0030_auto_20220401_2244"),
    ]

    operations = [
        migrations.RunPython(migrate_task_results),
    ]
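The data migration above sets atomic = False and wraps each row in transaction.atomic(), so a single IntegrityError skips that row instead of aborting the whole migration. For comparison, a generic sketch of the same RunPython pattern with an explicit no-op reverse step; the app label and model below are made up for illustration and are not part of this diff:

from django.db import migrations


def copy_rows_forward(apps, schema_editor):
    # hypothetical model, used only to illustrate the pattern
    Widget = apps.get_model("myapp", "Widget")
    for widget in Widget.objects.all():
        widget.migrated = True
        widget.save(update_fields=["migrated"])


class Migration(migrations.Migration):
    # run outside one wrapping transaction, as in 0031 above
    atomic = False

    dependencies = [("myapp", "0001_initial")]

    operations = [
        # RunPython.noop lets the migration be unapplied without undoing the copy
        migrations.RunPython(copy_rows_forward, migrations.RunPython.noop),
    ]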
@@ -0,0 +1,45 @@
# Generated by Django 3.2.12 on 2022-04-01 23:01

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('autotasks', '0031_auto_20220401_2249'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='automatedtask',
            name='execution_time',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='last_run',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='parent_task',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='retcode',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='status',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='stderr',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='stdout',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='sync_status',
        ),
    ]
@@ -0,0 +1,51 @@
# Generated by Django 3.2.12 on 2022-04-02 00:41

from datetime import datetime

from django.db import migrations
from django.utils.timezone import make_aware


def migrate_script_data(apps, schema_editor):
    AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
    # convert autotask to the new format
    for task in AutomatedTask.objects.all():
        try:
            edited = False

            # convert scheduled task_type to daily
            if task.task_type == "scheduled":
                task.task_type = "daily"
                # run_time_minute holds an "HH:MM" string; parse it into an aware datetime
                task.run_time_date = make_aware(datetime.strptime(task.run_time_minute, "%H:%M"))
                task.daily_interval = 1
                edited = True

            # convert the old script/script_args/timeout columns into the actions list
            if not task.actions:
                if not task.script:
                    # nothing to convert and no script to run: drop the task
                    task.delete()
                    continue

                task.actions = [
                    {
                        "type": "script",
                        "script": task.script.pk,
                        "script_args": task.script_args,
                        "timeout": task.timeout,
                        "name": task.script.name,
                    }
                ]
                edited = True

            if edited:
                task.save()
        except Exception:
            continue


class Migration(migrations.Migration):

    dependencies = [
        ("autotasks", "0032_auto_20220401_2301"),
    ]

    operations = [
        migrations.RunPython(migrate_script_data),
    ]
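For reference, the actions entries written by migrate_script_data have roughly the shape sketched below; only the keys come from the migration above, and the values are invented for illustration:

# illustrative only: one converted action entry as stored in the JSON "actions" field
example_actions = [
    {
        "type": "script",
        "script": 42,                 # primary key of the referenced Script
        "script_args": ["-Verbose"],  # previously the script_args column
        "timeout": 120,               # previously the timeout column
        "name": "Clear Temp Files",   # previously taken from the script's name
    }
]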
@@ -0,0 +1,25 @@
# Generated by Django 3.2.12 on 2022-04-02 00:46

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('autotasks', '0033_auto_20220402_0041'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='automatedtask',
            name='script',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='script_args',
        ),
        migrations.RemoveField(
            model_name='automatedtask',
            name='timeout',
        ),
    ]
Some files were not shown because too many files have changed in this diff.