Compare commits

...

273 Commits

Author SHA1 Message Date
wh1te909
6ae2da22c1 Release 0.13.4 2022-05-15 07:16:14 +00:00
wh1te909
cef1ab9512 bump versions 2022-05-15 07:01:47 +00:00
wh1te909
94f02bfca3 only if exists 2022-05-15 06:59:00 +00:00
sadnub
a941bb1744 disable auto quoting when using variable substitution on batch scripts. Fixes #1020 2022-05-14 22:45:25 -04:00
sadnub
6ff591427a Add watcher in agent view for route changes and set new active agent. Fixes #1110 2022-05-14 22:23:03 -04:00
sadnub
809e172280 Fixes check policy copy issue where copied checks are all of type diskspace 2022-05-14 22:01:29 -04:00
wh1te909
17aedae0a9 recreate env 2022-05-14 23:59:32 +00:00
wh1te909
ef817ccb3a isort 2022-05-14 23:30:01 +00:00
wh1te909
0fb55b0bee remove middleware 2022-05-14 23:23:24 +00:00
wh1te909
a1a6eddc31 update reqs 2022-05-14 23:00:31 +00:00
wh1te909
ff3d0b6b57 handle empty payload, add monitoring endpoint 2022-05-13 00:50:53 +00:00
wh1te909
dd64cef4c4 optimize endpoints, remove deprecated endpoint, start reworking tests 2022-05-11 22:38:03 +00:00
Dan
9796848079 Merge pull request #1127 from silversword411/develop
typo fix
2022-05-11 14:54:05 -07:00
silversword411
fea7eb4312 typo fix 2022-05-11 17:10:06 -04:00
wh1te909
c12cd0e755 fix task run doesn't show name in history tab fixes #1097 2022-05-10 19:21:49 +00:00
wh1te909
d86a72f858 remove py 3.10.2 2022-05-10 18:52:47 +00:00
wh1te909
50cd7f219a update reqs 2022-05-10 18:45:33 +00:00
wh1te909
8252b3eccc more enum 2022-05-10 17:09:06 +00:00
wh1te909
d0c6e3a158 move tests 2022-05-10 16:29:09 +00:00
wh1te909
1505fa547e update nats reqs and change mod name 2022-05-10 16:27:51 +00:00
wh1te909
9017bad884 fix test 2022-05-09 17:35:07 +00:00
wh1te909
2ac5e316a5 refactor recovery func 2022-05-09 06:43:04 +00:00
wh1te909
29f9113062 update coverage 2022-05-09 06:40:33 +00:00
wh1te909
46349672d8 optimize checkrunner result endpoint 2022-05-09 06:23:59 +00:00
wh1te909
4787be2db0 fix tabs in popout view #1110 2022-05-04 22:17:45 +00:00
wh1te909
f0a8c5d732 fix site sorting fixes #1118 2022-05-04 15:46:05 +00:00
wh1te909
9ad520bf7c remove unnecessary middleware 2022-05-02 00:57:40 +00:00
wh1te909
bd0cc51554 update asgi 2022-05-02 00:57:01 +00:00
wh1te909
12f599f974 add linux mint 2022-05-01 20:34:31 +00:00
wh1te909
0118d5fb40 log enums 2022-04-30 02:01:20 +00:00
wh1te909
65cadb311a more enum 2022-04-29 19:09:17 +00:00
wh1te909
dd75bd197d add back dummy cache 2022-04-29 17:22:10 +00:00
wh1te909
7e155bdb43 typing 2022-04-29 06:45:20 +00:00
wh1te909
993b6fddf4 redundant 2022-04-29 06:41:40 +00:00
wh1te909
6ba51df6a7 add sh to download 2022-04-29 06:29:43 +00:00
wh1te909
1185ac58e1 more enum 2022-04-29 06:21:48 +00:00
wh1te909
f835997f49 switch runners and use redis cache during testing 2022-04-29 05:23:50 +00:00
wh1te909
a597dba775 fix role cache 2022-04-29 05:21:51 +00:00
wh1te909
3194e83a66 update reqs 2022-04-29 00:27:01 +00:00
wh1te909
096c3cdd34 more enum 2022-04-28 18:01:11 +00:00
wh1te909
3a1ea42333 fix settings 2022-04-28 17:33:51 +00:00
wh1te909
64877d4299 fix black not auto formatting 2022-04-28 17:25:21 +00:00
wh1te909
e957dc5e2c black 2022-04-28 17:24:45 +00:00
wh1te909
578d5c5830 more enum 2022-04-28 17:07:58 +00:00
sadnub
96284f9508 make eslint error on warnings 2022-04-28 09:50:41 -04:00
wh1te909
698b38dcba fix warning 2022-04-28 07:07:33 +00:00
wh1te909
6db826befe fix dir again 2022-04-28 07:02:38 +00:00
wh1te909
1a3d412d73 pwd 2022-04-28 07:01:44 +00:00
wh1te909
b8461c9dd8 fix dir 2022-04-28 07:00:09 +00:00
wh1te909
699bd9de10 fix workflow 2022-04-28 06:56:03 +00:00
Dan
54b6866e21 Merge pull request #1102 from sadnub/typescript
Typescript prep and add some linting
2022-04-27 23:45:33 -07:00
sadnub
afd155e9c1 fix actions 2022-04-27 22:48:29 -04:00
sadnub
910a717230 add name to gh action 2022-04-27 22:34:52 -04:00
sadnub
70fbd33d61 add lint and formatting gh action 2022-04-27 21:18:52 -04:00
sadnub
2da0d5ee21 add typescript support and stricter formatting/linting 2022-04-27 20:47:32 -04:00
wh1te909
98f64e057a update reqs 2022-04-27 05:07:22 +00:00
wh1te909
3d9d936c56 update url 2022-04-27 05:06:21 +00:00
wh1te909
2b4cb59df8 add more typing to dev reqs 2022-04-26 23:34:10 +00:00
wh1te909
9d80da52e3 more demo stuff 2022-04-26 23:33:45 +00:00
wh1te909
fd176d2c64 start moving to enums for choicefields 2022-04-26 01:13:18 +00:00
sadnub
538b6de36b set default alert severities for checks and tasks so that blank alert templates being applied to agent don't stop all alerts 2022-04-25 14:38:34 -04:00
sadnub
f7eca8aee0 Ignore timezone when editing automated tasks 2022-04-25 14:17:57 -04:00
wh1te909
a754d94c2c remove test multiprocessing 2022-04-25 08:15:06 +00:00
wh1te909
5e3493e6a9 fix tests 2022-04-25 07:54:31 +00:00
wh1te909
619a14c26b refactor action_type 2022-04-25 07:47:58 +00:00
wh1te909
7d9a8decf0 start choice field refactor, remove deprecated model fields 2022-04-25 07:10:33 +00:00
wh1te909
d11e14ad89 add migration 2022-04-25 06:51:42 +00:00
wh1te909
69189cf2af isort 2022-04-25 06:50:48 +00:00
wh1te909
6e7d2f19d2 update middleware for django 4, refactor a func to fix circular import, start fixing fake_agents script 2022-04-25 06:48:14 +00:00
wh1te909
d99ebf5d6a remove deprecated model field 2022-04-25 06:43:58 +00:00
wh1te909
ef2d19e95b back to develop 2022-04-25 01:36:12 +00:00
wh1te909
e3a66f017e Release 0.13.3 2022-04-25 01:32:11 +00:00
wh1te909
9e544ad471 bump version 2022-04-25 01:31:09 +00:00
sadnub
5f19aa527a fix running policy script checks 2022-04-24 21:02:28 -04:00
sadnub
bfd5bc5c26 remove port changes for persistent mesh configurations 2022-04-24 20:51:18 -04:00
wh1te909
2d0ec3accd back to dev 2022-04-25 00:38:47 +00:00
wh1te909
0999d98225 Release 0.13.2 2022-04-25 00:33:40 +00:00
wh1te909
d8dd3e133f bump version 2022-04-25 00:31:14 +00:00
wh1te909
528470c37f fix slow query 2022-04-25 00:17:34 +00:00
wh1te909
c03cd53853 fix deprecated function 2022-04-24 23:20:39 +00:00
wh1te909
b57fc8a29c testing num queries 2022-04-24 23:14:37 +00:00
wh1te909
a04ed5c3ca remove duplicate settings 2022-04-24 23:09:44 +00:00
Dan
3ad1df14f6 Merge pull request #1085 from dinger1986/patch-1
Update troubleshoot_server.sh
2022-04-24 15:26:47 -07:00
wh1te909
d8caf12fdc optimize policy query 2022-04-24 22:07:24 +00:00
wh1te909
5ca9d30d5f add a test and optimize some queries 2022-04-24 21:23:33 +00:00
sadnub
a7a71b4a46 fix default tab not working if 'servers' is selected 2022-04-24 16:35:42 -04:00
sadnub
638603ac6b fix variable substitution when running policy tasks 2022-04-24 16:35:18 -04:00
wh1te909
1d70c15027 wrong related 2022-04-24 01:45:03 +00:00
wh1te909
7a5f03d672 fix slow query 2022-04-23 23:24:36 +00:00
wh1te909
39e97c5589 Release 0.13.1 2022-04-23 22:59:35 +00:00
wh1te909
1943d8367e bump version 2022-04-23 22:57:09 +00:00
wh1te909
f91c5af9a1 optimize query 2022-04-23 22:51:45 +00:00
wh1te909
2be71fc877 increase chunk size 2022-04-23 22:50:06 +00:00
wh1te909
f5f5b4a8db fixed 'Save and test email' always returning success even if it failed 2022-04-23 18:50:23 +00:00
sadnub
ac9cfd09ea fix init container not having access to the redis service 2022-04-23 12:28:13 -04:00
sadnub
4cfc85dbfd fix agent sorting by last response 2022-04-23 12:21:18 -04:00
sadnub
1f3d2f47b1 increase max_length on customfield name field to 100 2022-04-23 10:26:34 -04:00
dinger1986
653c482ff7 Update troubleshoot_server.sh
add in output of a few log files
2022-04-23 15:03:58 +01:00
wh1te909
4b069cc2b0 fix pending actions showing agent pending update even though it was already updated 2022-04-23 07:43:30 +00:00
wh1te909
c89349a43a update badge 2022-04-23 03:49:37 +00:00
wh1te909
6e92d6c62c testing codecov 2022-04-23 03:44:45 +00:00
wh1te909
5d3d3e9076 fix tests? 2022-04-23 02:07:00 +00:00
wh1te909
b440c772d6 forgot pytest.ini 2022-04-23 02:01:34 +00:00
wh1te909
2895560b30 testing pytest/codecov 2022-04-23 02:00:12 +00:00
wh1te909
bedcecb2e1 fix deprecations 2022-04-23 00:19:00 +00:00
wh1te909
656ac829a4 black 2022-04-22 23:22:41 +00:00
wh1te909
4d83debc0e also sort in db 2022-04-22 23:19:42 +00:00
wh1te909
4ff5d19979 fix sorting of clients tree 2022-04-22 22:48:54 +00:00
wh1te909
2216ee422e Release 0.13.0 2022-04-22 20:19:43 +00:00
wh1te909
9acda5696e bump versions 2022-04-22 19:00:14 +00:00
wh1te909
dc6255048a fix wrong token variable being passed to func 2022-04-22 18:19:26 +00:00
wh1te909
2acde429d7 lower intervals 2022-04-22 16:35:51 +00:00
wh1te909
efcac1adac fix runaway memory during migration 2022-04-22 08:43:06 +00:00
wh1te909
81d5ecd758 add a run checks button to checks tab 2022-04-22 08:09:21 +00:00
wh1te909
d9ff004454 add trailing slash 2022-04-22 08:08:59 +00:00
wh1te909
d57135d793 lower intervals 2022-04-22 08:08:49 +00:00
wh1te909
bb5a0023af update nats-api 2022-04-21 21:26:22 +00:00
wh1te909
e3c25a167e nats 2.8.1 2022-04-21 21:16:31 +00:00
wh1te909
5be93ae17d update nats-server 2022-04-21 21:01:59 +00:00
wh1te909
3a2511d4a1 remove deprecated certbot flag 2022-04-21 20:45:47 +00:00
wh1te909
8ec7d98eef remove old docker instructions 2022-04-21 20:45:06 +00:00
wh1te909
9421ae25f7 update scripts and reqs 2022-04-21 20:32:41 +00:00
wh1te909
5b288b6fa1 prevent dialog error when script hasn't run yet 2022-04-21 19:17:57 +00:00
wh1te909
d35ed2980b add helper to run all celery tasks 2022-04-21 19:16:45 +00:00
wh1te909
6d8df6d2b9 lower intervals 2022-04-21 18:18:21 +00:00
wh1te909
a839513f7f more optimization 2022-04-21 17:29:38 +00:00
wh1te909
97b37b4742 make policy tabs match agent tabs UI 2022-04-21 17:14:07 +00:00
wh1te909
4894031219 fix for policy tasks not deleting on agents 2022-04-21 07:33:58 +00:00
wh1te909
8985b5511c remove import 2022-04-21 07:25:13 +00:00
wh1te909
b3c2a6a0cc fix typing 2022-04-21 07:08:33 +00:00
wh1te909
7291b440bb fix sync status 2022-04-21 06:34:06 +00:00
wh1te909
d75f134677 fix comment 2022-04-21 06:32:31 +00:00
wh1te909
e60069ec1d fix assigned task not running when the check it was assigned to was a policy check 2022-04-21 06:28:22 +00:00
wh1te909
034f49573d fix slow query 2022-04-21 01:08:02 +00:00
wh1te909
973d37a237 improve wording and add tooltip 2022-04-20 22:11:47 +00:00
wh1te909
d2ec609e68 add some default services 2022-04-20 21:30:10 +00:00
wh1te909
6b410399cd add function to clear cache 2022-04-20 19:47:49 +00:00
wh1te909
0c010570b9 make agent overdue alert button text more clear 2022-04-20 19:09:40 +00:00
wh1te909
78fc7faa13 fix SynchronousOnlyOperation when calling task from celery, due to wrong query 2022-04-20 18:43:04 +00:00
wh1te909
7671cce263 refactor and add tests for constants 2022-04-20 17:21:26 +00:00
wh1te909
a43a66a2d3 add daphne lock to gitignore 2022-04-20 17:20:11 +00:00
wh1te909
2190a2ed25 use isinstance 2022-04-20 17:03:01 +00:00
wh1te909
227636b705 always send recovery alert at agent level 2022-04-20 16:52:31 +00:00
wh1te909
5032170362 fix cache when deleting global policy 2022-04-20 16:38:43 +00:00
sadnub
b94c3961eb fix availability alert filter to get overdue alerts working 2022-04-20 08:51:17 -04:00
sadnub
46c7e89a94 fix displaying correct alert severity 2022-04-19 22:37:56 -04:00
sadnub
80861fd620 revert back to psycopg2 package 2022-04-19 22:37:56 -04:00
wh1te909
44f9390790 fix task call 2022-04-20 00:35:02 +00:00
wh1te909
8eca6c409a update reqs 2022-04-19 23:12:12 +00:00
wh1te909
4907c01191 refactor stuff 2022-04-19 22:33:51 +00:00
wh1te909
04bf314c61 fix task name 2022-04-19 22:26:27 +00:00
sadnub
57d92b276b some more django orm magic 2022-04-18 13:04:19 -04:00
sadnub
6a8efddab5 add category label to filtered custom field results 2022-04-18 12:36:54 -04:00
sadnub
fd908494ae make supported dropdowns filterable 2022-04-18 12:33:33 -04:00
sadnub
d617b23c2f add monitoring_type /agents filter and add index for monitoring_type field 2022-04-18 12:33:11 -04:00
sadnub
27874728bc return scripts from api sorted by category. #1066 2022-04-16 22:30:50 -04:00
wh1te909
56a0345260 fix arg 2022-04-16 23:18:39 +00:00
wh1te909
c412839165 ctx is no longer needed 2022-04-16 23:02:41 +00:00
wh1te909
b77f927ad5 remove old field 2022-04-16 23:00:57 +00:00
sadnub
8edd7f6a56 show the task name in agent history output. #1000 2022-04-16 17:31:29 -04:00
sadnub
c6915d0291 make agent count take permissions into account. #1028 2022-04-16 17:27:20 -04:00
sadnub
388eb94014 make site dropdown filterable #1068 2022-04-16 17:06:46 -04:00
sadnub
9ab80553e1 fix ui bug where policy checks weren't displaying if they had associated tasks 2022-04-16 16:47:52 -04:00
sadnub
86d639ee6a fix migration 2022-04-16 16:31:35 -04:00
sadnub
979fd8a249 fix migrations 2022-04-16 16:22:41 -04:00
sadnub
e65ab58f84 fix patches pending field on agent table 2022-04-16 16:22:26 -04:00
sadnub
8414bdbab1 prune orphaned tasks on agents daily 2022-04-16 15:24:07 -04:00
sadnub
d037b09128 fix Alerts model not existing during initial migration run 2022-04-16 15:23:51 -04:00
sadnub
9a687fec9b add meshcentral port alias to fix chat 2022-04-16 14:37:07 -04:00
sadnub
e9d71f169c optimize the cache_db_values task 2022-04-16 14:36:31 -04:00
sadnub
e09c307d58 fix tests 2022-04-15 17:42:45 -04:00
sadnub
d23d641b1b fix alert_severity on check_result defaulting to warning 2022-04-15 17:29:31 -04:00
sadnub
b1301091f9 add migration to remove duplicate win_task_names and make it unique 2022-04-15 17:06:33 -04:00
sadnub
2458eb3960 create migration that will fix duplicate win_task_names and make win_task_name unique 2022-04-15 14:52:37 -04:00
sadnub
fa836d88c7 fix migrating task/check results 2022-04-15 14:17:57 -04:00
sadnub
e26349f2fc more improvements loading the clients tree 2022-04-14 22:41:10 -04:00
sadnub
daa4e4d566 move maintenance mode and agent count checks to annotate to load client tree faster 2022-04-14 22:26:15 -04:00
sadnub
8e75df686d fix check/task results not being added to the model 2022-04-14 19:08:14 -04:00
sadnub
53537e7b3a make gh use the correct cache 2022-04-14 18:20:52 -04:00
sadnub
4beddc2271 fix tests? 2022-04-14 18:12:09 -04:00
sadnub
a6e4a774e0 fix tests 2022-04-14 18:03:39 -04:00
sadnub
dacc1c5770 fix cache not hitting if it returns an empty list 2022-04-14 17:58:51 -04:00
sadnub
25e922bc4c reduce queries on agent table load and bust cache on policy changes 2022-04-14 17:17:40 -04:00
wh1te909
c877c9b0fb back to latest 2022-04-13 16:15:02 +00:00
wh1te909
56bb206f25 back to latest 2022-04-13 16:14:40 +00:00
wh1te909
740a9ceaa7 add dev version 2022-04-13 07:43:36 +00:00
wh1te909
64e936127a revert docker version #1062 2022-04-13 05:38:10 +00:00
wh1te909
bd4549f389 revert docker version 2022-04-13 05:35:42 +00:00
sadnub
b1f7bd3ead fix tests 2022-04-12 16:38:57 -04:00
sadnub
b5e3b16e3a add option to disable mesh autologin 2022-04-12 16:18:31 -04:00
wh1te909
96a72a2cd7 0.12.4 fix login token 2022-04-12 19:09:04 +00:00
sadnub
c155da858e fix submitting saving date times and convert to native date input versus quasar 2022-04-12 13:38:53 -04:00
wh1te909
5e20a5cd71 0.12.4 fix login token 2022-04-12 16:49:00 +00:00
wh1te909
c1b2bbd152 update reqs 2022-04-12 06:14:46 +00:00
Dan
e3b5f418d6 Merge pull request #1008 from sadnub/develop
Policy rework, Global datetime format and other GH issue fixes
2022-04-11 16:21:05 -07:00
wh1te909
f82b589d03 Release 0.12.3 2022-04-11 23:16:28 +00:00
wh1te909
cddac4d0fb bump version 2022-04-11 22:21:57 +00:00
sadnub
dd6f92e54d suppress the redis key length warning 2022-04-10 23:23:04 -04:00
sadnub
5d4558bddf fix caching tasks 2022-04-10 23:18:47 -04:00
sadnub
5aa7b5a337 add custom cache backend for deleting many keys with a pattern 2022-04-10 22:42:10 -04:00
wh1te909
2fe0b5b90d testing coverage excludes 2022-04-10 20:26:00 +00:00
wh1te909
aa6997990c fix import 2022-04-10 19:21:03 +00:00
sadnub
c02ab50a0a clear role cache on save 2022-04-10 10:46:47 -04:00
sadnub
7cb16b2259 add caching for coresettings and roles 2022-04-10 10:44:51 -04:00
wh1te909
3173dc83a5 Merge branch 'develop' of https://github.com/sadnub/tacticalrmm into sadnub-develop 2022-04-10 05:09:03 +00:00
wh1te909
baddc29bb8 fix tests 2022-04-10 05:08:20 +00:00
Dan
612cbe6be4 Merge branch 'develop' into develop 2022-04-09 21:46:43 -07:00
sadnub
4c1d2ab1bb fix agent not getting alert template when policies change 2022-04-09 23:56:04 -04:00
sadnub
6b4704b2e2 fix caching tasks 2022-04-09 22:01:07 -04:00
wh1te909
c2286cde01 fix reqs 2022-04-09 23:09:42 +00:00
wh1te909
24a17712e7 skip on empty dict 2022-04-09 23:09:21 +00:00
wh1te909
27d537e7bb do not silently continue on exception 2022-04-09 23:08:34 +00:00
wh1te909
dbd89c72a3 django 4 2022-04-09 17:18:35 +00:00
wh1te909
ff41bbd0e5 adjust celery config 2022-04-09 17:09:54 +00:00
wh1te909
4bdb6ae84e fix graphics 2022-04-09 17:09:09 +00:00
sadnub
cece7b79ad remove silk profile config 2022-04-09 12:22:22 -04:00
sadnub
8d09d95fc3 fix ci attempt 2 2022-04-09 12:19:38 -04:00
sadnub
752542a1d1 fix ci and fix caching 2022-04-09 09:04:56 -04:00
sadnub
dd077383f7 fix docker dev settings 2022-04-08 23:39:18 -04:00
sadnub
6e808dbb0f add meshctrl to dev dependencies 2022-04-08 23:27:32 -04:00
sadnub
4ef3441f70 typo 2022-04-08 23:25:24 -04:00
sadnub
82624d6657 fix tests and more typing 2022-04-08 23:23:10 -04:00
sadnub
62e2b5230c configure mypy vscode extension and start fixing some types 2022-04-08 23:23:10 -04:00
sadnub
3325c30f29 Django 4 upgrade and add REDIS as cache backend 2022-04-08 23:22:33 -04:00
sadnub
18a06168f1 wip improving query times 2022-04-08 23:20:51 -04:00
sadnub
27e93e499f add django-silk to the docker dev setup 2022-04-08 23:20:51 -04:00
sadnub
90644a21a3 add date formatting helper icon that opens to quasar's site 2022-04-08 23:19:35 -04:00
sadnub
7e31f43ef1 allow date_format customization on the user level 2022-04-08 23:19:35 -04:00
sadnub
b13fc1fba4 fix websockets issues 2022-04-08 23:19:35 -04:00
sadnub
5d9109e526 add tests for handling multiple alert objects returned 2022-04-08 23:19:35 -04:00
sadnub
78dfa36b2a fix negative index issue 2022-04-08 23:19:35 -04:00
sadnub
dc05d87b44 add redis volume back 2022-04-08 23:19:35 -04:00
wh1te909
2c323a13c1 typo 2022-04-08 23:19:35 -04:00
sadnub
d4c5e38857 fix redis background save issue 2022-04-08 23:19:35 -04:00
sadnub
fb80e5c367 remove aof from redis 2022-04-08 23:19:35 -04:00
sadnub
beb08a3afb add script action migrate and remove deprecated fields 2022-04-08 23:19:35 -04:00
sadnub
7b2de8cbbd return 400 error if agent_id is missing from check results 2022-04-08 23:19:35 -04:00
sadnub
83e63bc87c migrate check/task result data to new table and create post update tasks to remove checks/tasks managed by policy 2022-04-08 23:19:35 -04:00
sadnub
4f5da33fd6 add some more typing info 2022-04-08 23:19:35 -04:00
sadnub
d00d003a67 fix typo 2022-04-08 23:18:02 -04:00
sadnub
002f24be10 fix fake agents script 2022-04-08 23:18:02 -04:00
sadnub
04992a1d95 add a helper to get global settings and work to remove all of the 'ignore: type' 2022-04-08 23:18:02 -04:00
sadnub
3c7cf2446e fix auto resolve alerts task to get policy checks as well 2022-04-08 23:14:05 -04:00
sadnub
29774ac014 fixed the rest of the tests and more bug fixes 2022-04-08 23:14:05 -04:00
sadnub
562d580987 fixed/implemented more tests and more bug fixes 2022-04-08 23:14:05 -04:00
sadnub
d8ad6c0cb0 code formatting 2022-04-08 23:14:05 -04:00
sadnub
7897b0ebe9 fix some tests and fixed tons of bugs 2022-04-08 23:14:04 -04:00
sadnub
e38af9fd16 rework task create/modify/delete/running and fix checks tests 2022-04-08 23:14:04 -04:00
sadnub
6ffdf5c251 Fixed Check History graph and reworked task sync_status 2022-04-08 23:14:04 -04:00
sadnub
69ef7676af finalize the schema and fix ui for checks 2022-04-08 23:14:04 -04:00
sadnub
b0ac57040c cleanup migrations and rework all checks/tasks to use the task/results table. fix alerts 2022-04-08 23:14:04 -04:00
sadnub
826ac7f185 returned tasks/checks in agent runner serializer and saving results 2022-04-08 23:14:04 -04:00
sadnub
0623f53f5d fix date format string empty 2022-04-08 23:14:04 -04:00
sadnub
b5ae875589 fix issue with multiple alert object being returned 2022-04-08 23:14:04 -04:00
sadnub
c152e18e1a policy rework init 2022-04-08 23:14:04 -04:00
sadnub
903f0e5e19 implement global datetime format. #1007 2022-04-08 23:14:04 -04:00
sadnub
6fefd5589c Allow canceling other pending actions. Fixes #958 2022-04-08 23:14:04 -04:00
wh1te909
58fe14bd31 add coverage badge 2022-04-09 02:10:51 +00:00
wh1te909
97f362ed1e fix for multiprocessing 2022-04-09 01:26:04 +00:00
wh1te909
b63e87ecb6 add parallel 2022-04-09 01:01:32 +00:00
wh1te909
ac3550dfd7 add lcov 2022-04-09 00:48:00 +00:00
wh1te909
8278a4cfd9 remove run 2022-04-09 00:45:02 +00:00
wh1te909
f161a2bbc8 more coveralls 2022-04-09 00:43:47 +00:00
wh1te909
6a94489df0 testing coveralls 2022-04-09 00:26:22 +00:00
wh1te909
c3a0b9192f update reqs 2022-04-08 19:34:38 +00:00
wh1te909
69ff70a9ce typo [skip ci] 2022-04-08 18:49:15 +00:00
wh1te909
5284eb0af8 validate mesh username 2022-04-08 18:47:57 +00:00
wh1te909
58384ae136 update supported version 2022-04-08 18:45:53 +00:00
wh1te909
054cc78e65 add meshctrl 2022-04-08 18:30:17 +00:00
wh1te909
8c283281d6 remove lower() from mesh username 2022-04-08 16:06:44 +00:00
wh1te909
241fe41756 fix env 2022-04-05 22:44:41 +00:00
wh1te909
e50e0626fa also check env 2022-04-05 21:31:13 +00:00
wh1te909
c9135f1573 add option to specify sslmode for nats-api pg connection closes #1049 2022-04-05 21:14:22 +00:00
345 changed files with 19327 additions and 12252 deletions
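
One change described in the commit list above but not shown in the excerpted diffs is the custom cache backend that can delete many keys matching a pattern (commit 5aa7b5a337), used together with the new role and core-settings caching to bust stale entries in bulk. Below is a minimal, hypothetical sketch of that idea using plain redis-py; it is not the project's actual backend, and the client settings and key pattern are assumptions.

# Hypothetical sketch only -- not tacticalrmm's actual cache backend.
# Assumes a plain redis-py client; the key pattern used below is made up.
import redis

def delete_keys_matching(client: redis.Redis, pattern: str, chunk: int = 500) -> int:
    """Delete every key matching `pattern` using SCAN, so the keyspace is
    walked incrementally instead of blocking the server with KEYS."""
    deleted = 0
    batch = []
    for key in client.scan_iter(match=pattern, count=chunk):
        batch.append(key)
        if len(batch) >= chunk:
            deleted += client.delete(*batch)
            batch.clear()
    if batch:
        deleted += client.delete(*batch)
    return deleted

if __name__ == "__main__":
    r = redis.Redis(host="localhost", port=6379, db=0)  # assumed local dev instance
    delete_keys_matching(r, "role_*")  # hypothetical pattern for the role cache

A real Django cache backend would wrap this in the backend class and build the pattern from the cache's key prefix and version, but the SCAN-then-DELETE loop is the core of pattern deletion.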

View File

@@ -105,7 +105,7 @@ services:
image: postgres:13-alpine
restart: always
environment:
POSTGRES_DB: tacticalrmm
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASS}
volumes:
@@ -120,7 +120,7 @@ services:
container_name: trmm-redis-dev
restart: always
user: 1000:1000
command: redis-server --appendonly yes
command: redis-server
image: redis:6.0-alpine
volumes:
- redis-data-dev:/data
@@ -145,6 +145,7 @@ services:
TRMM_PASS: ${TRMM_PASS}
HTTP_PROTOCOL: ${HTTP_PROTOCOL}
APP_PORT: ${APP_PORT}
POSTGRES_DB: ${POSTGRES_DB}
depends_on:
- postgres-dev
- meshcentral-dev

View File

@@ -60,6 +60,8 @@ DEBUG = True
DOCKER_BUILD = True
SWAGGER_ENABLED = True
CERT_FILE = '${CERT_PUB_PATH}'
KEY_FILE = '${CERT_PRIV_PATH}'
@@ -94,6 +96,7 @@ EOF
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
# run migrations and init scripts
"${VIRTUAL_ENV}"/bin/python manage.py pre_update_tasks
"${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
"${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
"${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup

View File

@@ -1,36 +1,41 @@
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
asgiref==3.5.0
celery==5.2.3
celery==5.2.6
channels==3.0.4
channels_redis==3.3.1
channels_redis==3.4.0
daphne==3.0.2
Django==3.2.12
Django==4.0.4
django-cors-headers==3.11.0
django-ipware==4.0.2
django-rest-knox==4.2.0
djangorestframework==3.13.1
future==0.18.2
msgpack==1.0.3
nats-py==2.0.0
nats-py==2.1.0
packaging==21.3
psycopg2-binary==2.9.3
pycryptodome==3.14.1
pyotp==2.6.0
pytz==2021.3
pytz==2022.1
qrcode==7.3.1
redis==4.1.3
redis==4.2.2
requests==2.27.1
twilio==7.6.0
urllib3==1.26.8
twilio==7.8.1
urllib3==1.26.9
validators==0.18.2
websockets==10.1
drf_spectacular==0.21.2
websockets==10.2
drf_spectacular==0.22.0
meshctrl==0.1.15
hiredis==2.0.0
# dev
black==22.1.0
Werkzeug==2.0.2
black==22.3.0
django-extensions==3.1.5
Pygments==2.11.2
isort==5.10.1
mypy==0.931
types-pytz==2021.3.4
mypy==0.942
types-pytz==2021.3.6
model-bakery==1.5.0
coverage==6.3.2
django-silk==4.3.0
django-stubs==1.10.1
djangorestframework-stubs==1.5.0

View File

@@ -10,23 +10,36 @@ on:
jobs:
test:
runs-on: self-hosted
runs-on: ubuntu-latest
name: Tests
strategy:
matrix:
python-version: ['3.10.4']
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Setup virtual env and install requirements
- uses: harmon758/postgresql-action@v1
with:
postgresql version: '14'
postgresql db: 'pipeline'
postgresql user: 'pipeline'
postgresql password: 'pipeline123456'
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install redis
run: |
sudo apt update
sudo apt install -y redis
redis-server --version
- name: Install requirements
working-directory: api/tacticalrmm
run: |
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS pipeline'
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test_pipeline'
sudo -u postgres psql -c 'CREATE DATABASE pipeline'
sudo -u postgres psql -c "SET client_encoding = 'UTF8'" pipeline
pwd
rm -rf /actions-runner/_work/trmm-actions/trmm-actions/api/env
cd api
python3.10 -m venv env
source env/bin/activate
cd tacticalrmm
python --version
SETTINGS_FILE="tacticalrmm/settings.py"
SETUPTOOLS_VER=$(grep "^SETUPTOOLS_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
@@ -38,19 +51,23 @@ jobs:
- name: Run django tests
env:
GHACTIONS: "yes"
working-directory: api/tacticalrmm
run: |
cd api/tacticalrmm
source ../env/bin/activate
coverage run manage.py test -v 2
pytest
if [ $? -ne 0 ]; then
exit 1
fi
- name: Codestyle black
working-directory: api/tacticalrmm
run: |
cd api
source env/bin/activate
black --exclude migrations/ --check tacticalrmm
if [ $? -ne 0 ]; then
exit 1
fi
- uses: codecov/codecov-action@v2
with:
directory: ./api/tacticalrmm
files: ./api/tacticalrmm/coverage.xml
verbose: true

.github/workflows/frontend-linting.yml (new file, 27 lines added)

@@ -0,0 +1,27 @@
name: Frontend Linting and Formatting
on:
push:
branches: [develop]
pull_request:
branches: [develop]
defaults:
run:
working-directory: web
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 16
- run: npm install
- name: Run Prettier formatting
run: npm run format
- name: Run ESLint
run: npm run lint -- --max-warnings=0

.gitignore (4 lines added)

@@ -51,3 +51,7 @@ reset_db.sh
run_go_cmd.py
nats-api.conf
ignore/
coverage.lcov
daphne.sock.lock
.pytest_cache
coverage.xml

.vscode/extensions.json (new file, 23 lines added)

@@ -0,0 +1,23 @@
{
"recommendations": [
// frontend
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"editorconfig.editorconfig",
"vue.volar",
"wayou.vscode-todo-highlight",
// python
"matangover.mypy",
"ms-python.python",
// golang
"golang.go"
],
"unwantedRecommendations": [
"octref.vetur",
"hookyqr.beautify",
"dbaeumer.jshint",
"ms-vscode.vscode-typescript-tslint-plugin"
]
}

.vscode/settings.json (137 changed lines)

@@ -1,68 +1,75 @@
{
"python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python",
"python.languageServer": "Pylance",
"python.analysis.extraPaths": [
"api/tacticalrmm",
"api/env",
],
"python.analysis.diagnosticSeverityOverrides": {
"reportUnusedImport": "error",
"reportDuplicateImport": "error",
},
"python.analysis.typeCheckingMode": "basic",
"python.formatting.provider": "black",
"editor.formatOnSave": true,
"vetur.format.defaultFormatter.js": "prettier",
"vetur.format.defaultFormatterOptions": {
"prettier": {
"semi": true,
"printWidth": 120,
"tabWidth": 2,
"useTabs": false,
"arrowParens": "avoid",
}
},
"vetur.format.options.tabSize": 2,
"vetur.format.options.useTabs": false,
"python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python",
"python.languageServer": "Pylance",
"python.analysis.extraPaths": ["api/tacticalrmm", "api/env"],
"python.analysis.diagnosticSeverityOverrides": {
"reportUnusedImport": "error",
"reportDuplicateImport": "error",
"reportGeneralTypeIssues": "none"
},
"python.analysis.typeCheckingMode": "basic",
"python.linting.enabled": true,
"python.linting.mypyEnabled": true,
"python.linting.mypyArgs": [
"--ignore-missing-imports",
"--follow-imports=silent",
"--show-column-numbers",
"--strict"
],
"python.linting.ignorePatterns": [
"**/site-packages/**/*.py",
".vscode/*.py",
"**env/**"
],
"python.formatting.provider": "black",
"mypy.targets": ["api/tacticalrmm"],
"mypy.runUsingActiveInterpreter": true,
"editor.bracketPairColorization.enabled": true,
"editor.guides.bracketPairs": true,
"editor.formatOnSave": true,
"[vue][javascript][typescript][javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.codeActionsOnSave": ["source.fixAll.eslint"],
},
"eslint.validate": ["javascript", "javascriptreact", "typescript", "vue"],
"typescript.tsdk": "node_modules/typescript/lib",
"files.watcherExclude": {
"files.watcherExclude": {
"files.watcherExclude": {
"**/.git/objects/**": true,
"**/.git/subtree-cache/**": true,
"**/node_modules/": true,
"/node_modules/**": true,
"**/env/": true,
"/env/**": true,
"**/__pycache__": true,
"/__pycache__/**": true,
"**/.cache": true,
"**/.eggs": true,
"**/.ipynb_checkpoints": true,
"**/.mypy_cache": true,
"**/.pytest_cache": true,
"**/*.egg-info": true,
"**/*.feather": true,
"**/*.parquet*": true,
"**/*.pyc": true,
"**/*.zip": true
},
},
"go.useLanguageServer": true,
"[go]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": false,
},
"editor.snippetSuggestions": "none",
},
"[go.mod]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true,
},
},
"gopls": {
"usePlaceholders": true,
"completeUnimported": true,
"staticcheck": true,
"**/.git/objects/**": true,
"**/.git/subtree-cache/**": true,
"**/node_modules/": true,
"/node_modules/**": true,
"**/env/": true,
"/env/**": true,
"**/__pycache__": true,
"/__pycache__/**": true,
"**/.cache": true,
"**/.eggs": true,
"**/.ipynb_checkpoints": true,
"**/.mypy_cache": true,
"**/.pytest_cache": true,
"**/*.egg-info": true,
"**/*.feather": true,
"**/*.parquet*": true,
"**/*.pyc": true,
"**/*.zip": true
}
}
},
"go.useLanguageServer": true,
"[go]": {
"editor.codeActionsOnSave": {
"source.organizeImports": false
},
"editor.snippetSuggestions": "none"
},
"[go.mod]": {
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
"gopls": {
"usePlaceholders": true,
"completeUnimported": true,
"staticcheck": true
}
}

.vscode/tasks.json (deleted, 23 lines removed)

@@ -1,23 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "docker debug",
"type": "shell",
"command": "docker-compose",
"args": [
"-p",
"trmm",
"-f",
".devcontainer/docker-compose.yml",
"-f",
".devcontainer/docker-compose.debug.yml",
"up",
"-d",
"--build"
]
}
]
}

View File

@@ -1,13 +1,13 @@
# Tactical RMM
![](https://github.com/amidaware/tacticalrmm/actions/workflows/ci-tests.yml/badge.svg?branch=develop)
[![Coverage Status](https://coveralls.io/repos/github/wh1te909/tacticalrmm/badge.png?branch=develop&kill_cache=1)](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
![CI Tests](https://github.com/amidaware/tacticalrmm/actions/workflows/ci-tests.yml/badge.svg?branch=develop)
[![codecov](https://codecov.io/gh/amidaware/tacticalrmm/branch/develop/graph/badge.svg?token=8ACUPVPTH6)](https://codecov.io/gh/amidaware/tacticalrmm)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)
Tactical RMM is a remote monitoring & management tool, built with Django and Vue.\
It uses an [agent](https://github.com/amidaware/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
# [LIVE DEMO](https://demo.tacticalrmm.com/)
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
### [Discord Chat](https://discord.gg/upGTkWp)

View File

@@ -4,8 +4,8 @@
| Version | Supported |
| ------- | ------------------ |
| 0.12.0 | :white_check_mark: |
| < 0.12.0 | :x: |
| 0.12.2 | :white_check_mark: |
| < 0.12.2 | :x: |
## Reporting a Vulnerability

View File

@@ -20,6 +20,7 @@ omit =
*/urls.py
*/tests.py
*/test.py
*/tests/*
checks/utils.py
*/asgi.py
*/demo_views.py

View File

@@ -1,22 +1,23 @@
import uuid
from accounts.models import User
from django.core.management.base import BaseCommand
from accounts.models import User
class Command(BaseCommand):
help = "Creates the installer user"
def handle(self, *args, **kwargs):
def handle(self, *args, **kwargs): # type: ignore
self.stdout.write("Checking if installer user has been created...")
if User.objects.filter(is_installer_user=True).exists():
self.stdout.write("Installer user already exists")
return
User.objects.create_user( # type: ignore
User.objects.create_user(
username=uuid.uuid4().hex,
is_installer_user=True,
password=User.objects.make_random_password(60), # type: ignore
password=User.objects.make_random_password(60),
block_dashboard_login=True,
)
self.stdout.write("Installer user has been created")

View File

@@ -6,7 +6,7 @@ from knox.models import AuthToken
class Command(BaseCommand):
help = "Deletes all knox web tokens"
def handle(self, *args, **kwargs):
def handle(self, *args, **kwargs): # type: ignore
# only delete web tokens, not any generated by the installer or deployments
dont_delete = djangotime.now() + djangotime.timedelta(hours=23)
tokens = AuthToken.objects.exclude(deploytokens__isnull=False).filter(

View File

@@ -1,9 +1,10 @@
import subprocess
import pyotp
from accounts.models import User
from django.core.management.base import BaseCommand
from accounts.models import User
class Command(BaseCommand):
help = "Generates barcode for Authenticator and creates totp for user"

View File

@@ -2,9 +2,10 @@ import os
import subprocess
import pyotp
from accounts.models import User
from django.core.management.base import BaseCommand
from accounts.models import User
class Command(BaseCommand):
help = "Reset 2fa"

View File

@@ -1,6 +1,7 @@
from accounts.models import User
from django.core.management.base import BaseCommand
from accounts.models import User
class Command(BaseCommand):
help = "Reset password for user"

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2.12 on 2022-04-02 15:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0030_auto_20211104_0221'),
]
operations = [
migrations.AddField(
model_name='user',
name='date_format',
field=models.CharField(blank=True, max_length=30, null=True),
),
]

View File

@@ -1,25 +1,17 @@
from typing import Optional
from django.contrib.auth.models import AbstractUser
from django.core.cache import cache
from django.db import models
from django.db.models.fields import CharField, DateTimeField
from logs.models import BaseAuditModel
AGENT_DBLCLICK_CHOICES = [
("editagent", "Edit Agent"),
("takecontrol", "Take Control"),
("remotebg", "Remote Background"),
("urlaction", "URL Action"),
]
AGENT_TBL_TAB_CHOICES = [
("server", "Servers"),
("workstation", "Workstations"),
("mixed", "Mixed"),
]
CLIENT_TREE_SORT_CHOICES = [
("alphafail", "Move failing clients to the top"),
("alpha", "Sort alphabetically"),
]
from tacticalrmm.constants import (
ROLE_CACHE_PREFIX,
AgentDblClick,
AgentTableTabs,
ClientTreeSort,
)
class User(AbstractUser, BaseAuditModel):
@@ -29,7 +21,7 @@ class User(AbstractUser, BaseAuditModel):
dark_mode = models.BooleanField(default=True)
show_community_scripts = models.BooleanField(default=True)
agent_dblclick_action = models.CharField(
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
max_length=50, choices=AgentDblClick.choices, default=AgentDblClick.EDIT_AGENT
)
url_action = models.ForeignKey(
"core.URLAction",
@@ -39,15 +31,16 @@ class User(AbstractUser, BaseAuditModel):
on_delete=models.SET_NULL,
)
default_agent_tbl_tab = models.CharField(
max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
max_length=50, choices=AgentTableTabs.choices, default=AgentTableTabs.SERVER
)
agents_per_page = models.PositiveIntegerField(default=50) # not currently used
client_tree_sort = models.CharField(
max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail"
max_length=50, choices=ClientTreeSort.choices, default=ClientTreeSort.ALPHA_FAIL
)
client_tree_splitter = models.PositiveIntegerField(default=11)
loading_bar_color = models.CharField(max_length=255, default="red")
clear_search_when_switching = models.BooleanField(default=True)
date_format = models.CharField(max_length=30, blank=True, null=True)
is_installer_user = models.BooleanField(default=False)
last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
@@ -74,6 +67,23 @@ class User(AbstractUser, BaseAuditModel):
return UserSerializer(user).data
def get_and_set_role_cache(self) -> "Optional[Role]":
role = cache.get(f"{ROLE_CACHE_PREFIX}{self.role}")
if role and isinstance(role, Role):
return role
elif not role and not self.role:
return None
else:
models.prefetch_related_objects(
[self.role],
"can_view_clients",
"can_view_sites",
)
cache.set(f"{ROLE_CACHE_PREFIX}{self.role}", self.role, 600)
return self.role
class Role(BaseAuditModel):
name = models.CharField(max_length=255, unique=True)
@@ -174,6 +184,12 @@ class Role(BaseAuditModel):
def __str__(self):
return self.name
def save(self, *args, **kwargs) -> None:
# delete cache on save
cache.delete(f"{ROLE_CACHE_PREFIX}{self.name}")
super(BaseAuditModel, self).save(*args, **kwargs)
@staticmethod
def serialize(role):
# serializes the agent and returns json
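
The hunks above swap the hardcoded AGENT_DBLCLICK_CHOICES / AGENT_TBL_TAB_CHOICES / CLIENT_TREE_SORT_CHOICES lists for enums imported from tacticalrmm.constants, whose definitions are not part of this diff. A plausible sketch of those enums as Django TextChoices follows; the stored values come from the removed lists and the defaults used in the new fields, but the remaining member names and the ROLE_CACHE_PREFIX value are guesses.

# Sketch of what tacticalrmm/constants.py could define -- inferred, not the actual file.
from django.db import models

class AgentDblClick(models.TextChoices):
    EDIT_AGENT = "editagent", "Edit Agent"        # default for agent_dblclick_action
    TAKE_CONTROL = "takecontrol", "Take Control"  # member name assumed
    REMOTE_BG = "remotebg", "Remote Background"   # member name assumed
    URL_ACTION = "urlaction", "URL Action"        # member name assumed

class AgentTableTabs(models.TextChoices):
    SERVER = "server", "Servers"                  # default for default_agent_tbl_tab
    WORKSTATION = "workstation", "Workstations"   # member name assumed
    MIXED = "mixed", "Mixed"

class ClientTreeSort(models.TextChoices):
    ALPHA_FAIL = "alphafail", "Move failing clients to the top"
    ALPHA = "alpha", "Sort alphabetically"

ROLE_CACHE_PREFIX = "role_"  # placeholder value; only the name appears in the diff

Because TextChoices members are str subclasses, AgentDblClick.choices plugs straight into CharField(choices=...) and members such as AgentTableTabs.MIXED compare equal to their stored strings, which is why the test payloads later in this comparison can pass the enum members directly.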

View File

@@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm
class AccountsPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_accounts")
else:
@@ -28,7 +28,7 @@ class AccountsPerms(permissions.BasePermission):
class RolesPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_roles")
else:
@@ -36,7 +36,7 @@ class RolesPerms(permissions.BasePermission):
class APIKeyPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_api_keys")

View File

@@ -22,6 +22,7 @@ class UserUISerializer(ModelSerializer):
"loading_bar_color",
"clear_search_when_switching",
"block_dashboard_login",
"date_format",
]
@@ -39,6 +40,7 @@ class UserSerializer(ModelSerializer):
"last_login_ip",
"role",
"block_dashboard_login",
"date_format",
]

View File

@@ -1,16 +1,17 @@
from unittest.mock import patch
from accounts.models import APIKey, User
from accounts.serializers import APIKeySerializer
from django.test import override_settings
from model_bakery import baker, seq
from accounts.models import APIKey, User
from accounts.serializers import APIKeySerializer
from tacticalrmm.constants import AgentDblClick, AgentTableTabs, ClientTreeSort
from tacticalrmm.test import TacticalTestCase
class TestAccounts(TacticalTestCase):
def setUp(self):
self.client_setup()
self.setup_client()
self.bob = User(username="bob")
self.bob.set_password("hunter2")
self.bob.save()
@@ -69,17 +70,17 @@ class TestAccounts(TacticalTestCase):
self.assertEqual(r.status_code, 400)
self.assertIn("non_field_errors", r.data.keys())
@override_settings(DEBUG=True)
@patch("pyotp.TOTP.verify")
def test_debug_login_view(self, mock_verify):
url = "/login/"
mock_verify.return_value = True
# @override_settings(DEBUG=True)
# @patch("pyotp.TOTP.verify")
# def test_debug_login_view(self, mock_verify):
# url = "/login/"
# mock_verify.return_value = True
data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertIn("expiry", r.data.keys())
self.assertIn("token", r.data.keys())
# data = {"username": "bob", "password": "hunter2", "twofactor": "sekret"}
# r = self.client.post(url, data, format="json")
# self.assertEqual(r.status_code, 200)
# self.assertIn("expiry", r.data.keys())
# self.assertIn("token", r.data.keys())
class TestGetAddUsers(TacticalTestCase):
@@ -283,9 +284,9 @@ class TestUserAction(TacticalTestCase):
data = {
"dark_mode": True,
"show_community_scripts": True,
"agent_dblclick_action": "editagent",
"default_agent_tbl_tab": "mixed",
"client_tree_sort": "alpha",
"agent_dblclick_action": AgentDblClick.EDIT_AGENT,
"default_agent_tbl_tab": AgentTableTabs.MIXED,
"client_tree_sort": ClientTreeSort.ALPHA,
"client_tree_splitter": 14,
"loading_bar_color": "green",
"clear_search_when_switching": False,
@@ -308,7 +309,7 @@ class TestAPIKeyViews(TacticalTestCase):
serializer = APIKeySerializer(apikeys, many=True)
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(serializer.data, resp.data) # type: ignore
self.assertEqual(serializer.data, resp.data)
self.check_not_authenticated("get", url)
@@ -331,14 +332,14 @@ class TestAPIKeyViews(TacticalTestCase):
self.assertEqual(resp.status_code, 404)
apikey = baker.make("accounts.APIKey", name="Test")
url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
url = f"/accounts/apikeys/{apikey.pk}/"
data = {"name": "New Name"} # type: ignore
data = {"name": "New Name"}
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
apikey = APIKey.objects.get(pk=apikey.pk) # type: ignore
self.assertEquals(apikey.name, "New Name")
apikey = APIKey.objects.get(pk=apikey.pk)
self.assertEqual(apikey.name, "New Name")
self.check_not_authenticated("put", url)
@@ -349,11 +350,11 @@ class TestAPIKeyViews(TacticalTestCase):
# test delete api key
apikey = baker.make("accounts.APIKey")
url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
url = f"/accounts/apikeys/{apikey.pk}/"
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists()) # type: ignore
self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists())
self.check_not_authenticated("delete", url)
@@ -393,7 +394,7 @@ class TestAPIAuthentication(TacticalTestCase):
name="Test Token", key="123456", user=self.user
)
self.client_setup()
self.setup_client()
def test_api_auth(self):
url = "/clients/"

View File

@@ -5,13 +5,13 @@ from django.db import IntegrityError
from django.shortcuts import get_object_or_404
from ipware import get_client_ip
from knox.views import LoginView as KnoxLoginView
from logs.models import AuditLog
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from tacticalrmm.utils import notify_error
from logs.models import AuditLog
from tacticalrmm.helpers import notify_error
from .models import APIKey, Role, User
from .permissions import AccountsPerms, APIKeyPerms, RolesPerms

View File

@@ -1,6 +1,7 @@
from django.core.management.base import BaseCommand
from agents.models import Agent
from clients.models import Client, Site
from django.core.management.base import BaseCommand
class Command(BaseCommand):

View File

@@ -1,10 +1,10 @@
import asyncio
from agents.models import Agent
from django.core.management.base import BaseCommand
from django.utils import timezone as djangotime
from packaging import version as pyver
from agents.models import Agent
from tacticalrmm.constants import AGENT_DEFER
from tacticalrmm.utils import reload_nats

View File

@@ -1,11 +1,12 @@
# import datetime as dt
import random
from agents.models import Agent
from core.tasks import cache_db_fields_task
from django.core.management.base import BaseCommand
from django.utils import timezone as djangotime
from agents.models import Agent
from core.tasks import cache_db_fields_task, handle_resolved_stuff
class Command(BaseCommand):
help = "stuff for demo site in cron"
@@ -23,17 +24,10 @@ class Command(BaseCommand):
rand = now - djangotime.timedelta(minutes=random.randint(10, 20))
random_dates.append(rand)
""" for _ in range(5):
rand = djangotime.now() - djangotime.timedelta(hours=random.randint(1, 10))
random_dates.append(rand)
for _ in range(5):
rand = djangotime.now() - djangotime.timedelta(days=random.randint(40, 90))
random_dates.append(rand) """
agents = Agent.objects.only("last_seen")
for agent in agents:
agent.last_seen = random.choice(random_dates)
agent.save(update_fields=["last_seen"])
cache_db_fields_task()
handle_resolved_stuff()

View File

@@ -3,30 +3,48 @@ import json
import random
import string
from accounts.models import User
from agents.models import Agent, AgentHistory
from automation.models import Policy
from autotasks.models import AutomatedTask
from checks.models import Check, CheckHistory
from clients.models import Client, Site
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.utils import timezone as djangotime
from accounts.models import User
from agents.models import Agent, AgentHistory
from automation.models import Policy
from autotasks.models import AutomatedTask, TaskResult
from checks.models import Check, CheckHistory, CheckResult
from clients.models import Client, Site
from logs.models import AuditLog, PendingAction
from scripts.models import Script
from software.models import InstalledSoftware
from winupdate.models import WinUpdate, WinUpdatePolicy
from tacticalrmm.constants import (
CheckStatus,
CheckType,
EvtLogFailWhen,
EvtLogNames,
EvtLogTypes,
PAAction,
ScriptShell,
)
from tacticalrmm.demo_data import (
check_network_loc_aware_ps1,
check_storage_pool_health_ps1,
clear_print_spool_bat,
disks,
disks_linux_deb,
disks_linux_pi,
ping_fail_output,
ping_success_output,
restart_nla_ps1,
show_temp_dir_py,
spooler_stdout,
temp_dir_stdout,
wmi_deb,
wmi_pi,
)
from winupdate.models import WinUpdate, WinUpdatePolicy
AGENTS_TO_GENERATE = 250
AGENTS_TO_GENERATE = 20
SVCS = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
WMI_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi1.json")
@@ -43,18 +61,19 @@ EVT_LOG_FAIL = settings.BASE_DIR.joinpath(
class Command(BaseCommand):
help = "populate database with fake agents"
def rand_string(self, length):
def rand_string(self, length: int) -> str:
chars = string.ascii_letters
return "".join(random.choice(chars) for _ in range(length))
def handle(self, *args, **kwargs):
def handle(self, *args, **kwargs) -> None:
user = User.objects.first()
user.totp_key = "ABSA234234"
user.save(update_fields=["totp_key"])
if user:
user.totp_key = "ABSA234234"
user.save(update_fields=["totp_key"])
Client.objects.all().delete()
Agent.objects.all().delete()
Client.objects.all().delete()
Check.objects.all().delete()
Script.objects.all().delete()
AutomatedTask.objects.all().delete()
@@ -64,6 +83,9 @@ class Command(BaseCommand):
PendingAction.objects.all().delete()
call_command("load_community_scripts")
call_command("initial_db_setup")
call_command("load_chocos")
call_command("create_installer_user")
# policies
check_policy = Policy()
@@ -94,27 +116,27 @@ class Command(BaseCommand):
update_policy.email_if_fail = True
update_policy.save()
clients = [
clients = (
"Company 1",
"Company 2",
"Company 3",
"Company 1",
"Company 4",
"Company 5",
"Company 6",
]
sites1 = ["HQ1", "LA Office 1", "NY Office 1"]
sites2 = ["HQ2", "LA Office 2", "NY Office 2"]
sites3 = ["HQ3", "LA Office 3", "NY Office 3"]
sites4 = ["HQ4", "LA Office 4", "NY Office 4"]
sites5 = ["HQ5", "LA Office 5", "NY Office 5"]
sites6 = ["HQ6", "LA Office 6", "NY Office 6"]
)
sites1 = ("HQ1", "LA Office 1", "NY Office 1")
sites2 = ("HQ2", "LA Office 2", "NY Office 2")
sites3 = ("HQ3", "LA Office 3", "NY Office 3")
sites4 = ("HQ4", "LA Office 4", "NY Office 4")
sites5 = ("HQ5", "LA Office 5", "NY Office 5")
sites6 = ("HQ6", "LA Office 6", "NY Office 6")
client1 = Client(name="Company 1")
client2 = Client(name="Company 2")
client3 = Client(name="Company 3")
client4 = Client(name="Company 4")
client5 = Client(name="Company 5")
client6 = Client(name="Company 6")
client1 = Client(name=clients[0])
client2 = Client(name=clients[1])
client3 = Client(name=clients[2])
client4 = Client(name=clients[3])
client5 = Client(name=clients[4])
client6 = Client(name=clients[5])
client1.save()
client2.save()
@@ -141,7 +163,7 @@ class Command(BaseCommand):
for site in sites6:
Site(client=client6, name=site).save()
hostnames = [
hostnames = (
"DC-1",
"DC-2",
"FSV-1",
@@ -149,26 +171,30 @@ class Command(BaseCommand):
"WSUS",
"DESKTOP-12345",
"LAPTOP-55443",
]
descriptions = ["Bob's computer", "Primary DC", "File Server", "Karen's Laptop"]
modes = ["server", "workstation"]
op_systems_servers = [
)
descriptions = ("Bob's computer", "Primary DC", "File Server", "Karen's Laptop")
modes = ("server", "workstation")
op_systems_servers = (
"Microsoft Windows Server 2016 Standard, 64bit (build 14393)",
"Microsoft Windows Server 2012 R2 Standard, 64bit (build 9600)",
"Microsoft Windows Server 2019 Standard, 64bit (build 17763)",
]
)
op_systems_workstations = [
op_systems_workstations = (
"Microsoft Windows 8.1 Pro, 64bit (build 9600)",
"Microsoft Windows 10 Pro for Workstations, 64bit (build 18363)",
"Microsoft Windows 10 Pro, 64bit (build 18363)",
]
)
public_ips = ["65.234.22.4", "74.123.43.5", "44.21.134.45"]
linux_deb_os = "Debian 11.2 x86_64 5.10.0-11-amd64"
linux_pi_os = "Raspbian 11.2 armv7l 5.10.92-v7+"
total_rams = [4, 8, 16, 32, 64, 128]
public_ips = ("65.234.22.4", "74.123.43.5", "44.21.134.45")
total_rams = (4, 8, 16, 32, 64, 128)
now = dt.datetime.now()
django_now = djangotime.now()
boot_times = []
@@ -180,7 +206,7 @@ class Command(BaseCommand):
rand_days = now - dt.timedelta(days=random.randint(2, 50))
boot_times.append(str(rand_days.timestamp()))
user_names = ["None", "Karen", "Steve", "jsmith", "jdoe"]
user_names = ("None", "Karen", "Steve", "jsmith", "jdoe")
with open(SVCS) as f:
services = json.load(f)
@@ -195,10 +221,7 @@ class Command(BaseCommand):
with open(WMI_3) as f:
wmi3 = json.load(f)
wmi_details = []
wmi_details.append(wmi1)
wmi_details.append(wmi2)
wmi_details.append(wmi3)
wmi_details = [i for i in (wmi1, wmi2, wmi3)]
# software
with open(SW_1) as f:
@@ -207,9 +230,7 @@ class Command(BaseCommand):
with open(SW_2) as f:
software2 = json.load(f)
softwares = []
softwares.append(software1)
softwares.append(software2)
softwares = [i for i in (software1, software2)]
# windows updates
with open(WIN_UPDATES) as f:
@@ -225,72 +246,97 @@ class Command(BaseCommand):
clear_spool.name = "Clear Print Spooler"
clear_spool.description = "clears the print spooler. Fuck printers"
clear_spool.filename = "clear_print_spool.bat"
clear_spool.shell = "cmd"
clear_spool.shell = ScriptShell.CMD
clear_spool.script_body = clear_print_spool_bat
clear_spool.save()
check_net_aware = Script()
check_net_aware.name = "Check Network Location Awareness"
check_net_aware.description = "Check's network location awareness on domain computers, should always be domain profile and not public or private. Sometimes happens when computer restarts before domain available. This script will return 0 if check passes or 1 if it fails."
check_net_aware.filename = "check_network_loc_aware.ps1"
check_net_aware.shell = "powershell"
check_net_aware.shell = ScriptShell.POWERSHELL
check_net_aware.script_body = check_network_loc_aware_ps1
check_net_aware.save()
check_pool_health = Script()
check_pool_health.name = "Check storage spool health"
check_pool_health.description = "loops through all storage pools and will fail if any of them are not healthy"
check_pool_health.filename = "check_storage_pool_health.ps1"
check_pool_health.shell = "powershell"
check_pool_health.shell = ScriptShell.POWERSHELL
check_pool_health.script_body = check_storage_pool_health_ps1
check_pool_health.save()
restart_nla = Script()
restart_nla.name = "Restart NLA Service"
restart_nla.description = "restarts the Network Location Awareness windows service to fix the nic profile. Run this after the check network service fails"
restart_nla.filename = "restart_nla.ps1"
restart_nla.shell = "powershell"
restart_nla.shell = ScriptShell.POWERSHELL
restart_nla.script_body = restart_nla_ps1
restart_nla.save()
show_tmp_dir_script = Script()
show_tmp_dir_script.name = "Check temp dir"
show_tmp_dir_script.description = "shows files in temp dir using python"
show_tmp_dir_script.filename = "show_temp_dir.py"
show_tmp_dir_script.shell = "python"
show_tmp_dir_script.shell = ScriptShell.PYTHON
show_tmp_dir_script.script_body = show_temp_dir_py
show_tmp_dir_script.save()
for count_agents in range(AGENTS_TO_GENERATE):
client = random.choice(clients)
if client == "Company 1":
if client == clients[0]:
site = random.choice(sites1)
elif client == "Company 2":
elif client == clients[1]:
site = random.choice(sites2)
elif client == "Company 3":
elif client == clients[2]:
site = random.choice(sites3)
elif client == "Company 4":
elif client == clients[3]:
site = random.choice(sites4)
elif client == "Company 5":
elif client == clients[4]:
site = random.choice(sites5)
elif client == "Company 6":
elif client == clients[5]:
site = random.choice(sites6)
agent = Agent()
mode = random.choice(modes)
if mode == "server":
agent.operating_system = random.choice(op_systems_servers)
plat_pick = random.randint(1, 15)
if plat_pick in (7, 11):
agent.plat = "linux"
mode = "server"
# pi arm
if plat_pick == 7:
agent.goarch = "arm"
agent.wmi_detail = wmi_pi
agent.disks = disks_linux_pi
agent.operating_system = linux_pi_os
else:
agent.goarch = "amd64"
agent.wmi_detail = wmi_deb
agent.disks = disks_linux_deb
agent.operating_system = linux_deb_os
else:
agent.operating_system = random.choice(op_systems_workstations)
agent.plat = "windows"
agent.goarch = "amd64"
mode = random.choice(modes)
agent.wmi_detail = random.choice(wmi_details)
agent.services = services
agent.disks = random.choice(disks)
if mode == "server":
agent.operating_system = random.choice(op_systems_servers)
else:
agent.operating_system = random.choice(op_systems_workstations)
agent.hostname = random.choice(hostnames)
agent.version = settings.LATEST_AGENT_VER
agent.site = Site.objects.get(name=site)
agent.agent_id = self.rand_string(25)
agent.agent_id = self.rand_string(40)
agent.description = random.choice(descriptions)
agent.monitoring_type = mode
agent.public_ip = random.choice(public_ips)
agent.last_seen = djangotime.now()
agent.plat = "windows"
agent.plat_release = "windows-2019Server"
agent.last_seen = django_now
agent.total_ram = random.choice(total_rams)
agent.boot_time = random.choice(boot_times)
agent.logged_in_username = random.choice(user_names)
@@ -300,35 +346,31 @@ class Command(BaseCommand):
agent.overdue_email_alert = random.choice([True, False])
agent.overdue_text_alert = random.choice([True, False])
agent.needs_reboot = random.choice([True, False])
agent.wmi_detail = random.choice(wmi_details)
agent.services = services
agent.disks = random.choice(disks)
agent.save()
InstalledSoftware(agent=agent, software=random.choice(softwares)).save()
if agent.plat == "windows":
InstalledSoftware(agent=agent, software=random.choice(softwares)).save()
if mode == "workstation":
WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
else:
WinUpdatePolicy(agent=agent).save()
# windows updates load
guids = []
for k in windows_updates.keys():
guids.append(k)
for i in guids:
WinUpdate(
agent=agent,
guid=i,
kb=windows_updates[i]["KBs"][0],
title=windows_updates[i]["Title"],
installed=windows_updates[i]["Installed"],
downloaded=windows_updates[i]["Downloaded"],
description=windows_updates[i]["Description"],
severity=windows_updates[i]["Severity"],
).save()
if agent.plat == "windows":
# windows updates load
guids = [i for i in windows_updates.keys()]
for i in guids:
WinUpdate(
agent=agent,
guid=i,
kb=windows_updates[i]["KBs"][0],
title=windows_updates[i]["Title"],
installed=windows_updates[i]["Installed"],
downloaded=windows_updates[i]["Downloaded"],
description=windows_updates[i]["Description"],
severity=windows_updates[i]["Severity"],
).save()
# agent histories
hist = AgentHistory()
@@ -352,56 +394,68 @@ class Command(BaseCommand):
}
hist1.save()
# disk space check
check1 = Check()
check1.agent = agent
check1.check_type = "diskspace"
check1.status = "passing"
check1.last_run = djangotime.now()
check1.more_info = "Total: 498.7GB, Free: 287.4GB"
check1.warning_threshold = 25
check1.error_threshold = 10
check1.disk = "C:"
check1.email_alert = random.choice([True, False])
check1.text_alert = random.choice([True, False])
check1.save()
if agent.plat == "windows":
# disk space check
check1 = Check()
check1.agent = agent
check1.check_type = CheckType.DISK_SPACE
check1.warning_threshold = 25
check1.error_threshold = 10
check1.disk = "C:"
check1.email_alert = random.choice([True, False])
check1.text_alert = random.choice([True, False])
check1.save()
for i in range(30):
check1_history = CheckHistory()
check1_history.check_id = check1.id
check1_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
check1_history.y = random.randint(13, 40)
check1_history.save()
check_result1 = CheckResult()
check_result1.agent = agent
check_result1.assigned_check = check1
check_result1.status = CheckStatus.PASSING
check_result1.last_run = django_now
check_result1.more_info = "Total: 498.7GB, Free: 287.4GB"
check_result1.save()
for i in range(30):
check1_history = CheckHistory()
check1_history.check_id = check1.pk
check1_history.agent_id = agent.agent_id
check1_history.x = django_now - djangotime.timedelta(minutes=i * 2)
check1_history.y = random.randint(13, 40)
check1_history.save()
# ping check
check2 = Check()
check_result2 = CheckResult()
check2.agent = agent
check2.check_type = "ping"
check2.last_run = djangotime.now()
check2.check_type = CheckType.PING
check2.email_alert = random.choice([True, False])
check2.text_alert = random.choice([True, False])
check_result2.agent = agent
check_result2.assigned_check = check2
check_result2.last_run = django_now
if site in sites5:
check2.name = "Synology NAS"
check2.status = "failing"
check2.alert_severity = "error"
check_result2.status = CheckStatus.FAILING
check2.ip = "172.17.14.26"
check2.more_info = ping_fail_output
check_result2.more_info = ping_fail_output
else:
check2.name = "Google"
check2.status = "passing"
check_result2.status = CheckStatus.PASSING
check2.ip = "8.8.8.8"
check2.more_info = ping_success_output
check_result2.more_info = ping_success_output
check2.save()
check_result2.save()
for i in range(30):
check2_history = CheckHistory()
check2_history.check_id = check2.id
check2_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
check2_history.check_id = check2.pk
check2_history.agent_id = agent.agent_id
check2_history.x = django_now - djangotime.timedelta(minutes=i * 2)
if site in sites5:
check2_history.y = 1
check2_history.results = ping_fail_output
@@ -413,66 +467,97 @@ class Command(BaseCommand):
# cpu load check
check3 = Check()
check3.agent = agent
check3.check_type = "cpuload"
check3.status = "passing"
check3.last_run = djangotime.now()
check3.check_type = CheckType.CPU_LOAD
check3.warning_threshold = 70
check3.error_threshold = 90
check3.history = [15, 23, 16, 22, 22, 27, 15, 23, 23, 20, 10, 10, 13, 34]
check3.email_alert = random.choice([True, False])
check3.text_alert = random.choice([True, False])
check3.save()
check_result3 = CheckResult()
check_result3.agent = agent
check_result3.assigned_check = check3
check_result3.status = CheckStatus.PASSING
check_result3.last_run = django_now
check_result3.history = [
15,
23,
16,
22,
22,
27,
15,
23,
23,
20,
10,
10,
13,
34,
]
check_result3.save()
for i in range(30):
check3_history = CheckHistory()
check3_history.check_id = check3.id
check3_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
check3_history.check_id = check3.pk
check3_history.agent_id = agent.agent_id
check3_history.x = django_now - djangotime.timedelta(minutes=i * 2)
check3_history.y = random.randint(2, 79)
check3_history.save()
# memory check
check4 = Check()
check4.agent = agent
check4.check_type = "memory"
check4.status = "passing"
check4.check_type = CheckType.MEMORY
check4.warning_threshold = 70
check4.error_threshold = 85
check4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34]
check4.email_alert = random.choice([True, False])
check4.text_alert = random.choice([True, False])
check4.save()
check_result4 = CheckResult()
check_result4.agent = agent
check_result4.assigned_check = check4
check_result4.status = CheckStatus.PASSING
check_result4.last_run = django_now
check_result4.history = [34, 34, 35, 36, 34, 34, 34, 34, 34, 34]
check_result4.save()
for i in range(30):
check4_history = CheckHistory()
check4_history.check_id = check4.id
check4_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
check4_history.check_id = check4.pk
check4_history.agent_id = agent.agent_id
check4_history.x = django_now - djangotime.timedelta(minutes=i * 2)
check4_history.y = random.randint(2, 79)
check4_history.save()
# script check storage pool
check5 = Check()
check5.agent = agent
check5.check_type = "script"
check5.status = "passing"
check5.last_run = djangotime.now()
check5.check_type = CheckType.SCRIPT
check5.email_alert = random.choice([True, False])
check5.text_alert = random.choice([True, False])
check5.timeout = 120
check5.retcode = 0
check5.execution_time = "4.0000"
check5.script = check_pool_health
check5.save()
check_result5 = CheckResult()
check_result5.agent = agent
check_result5.assigned_check = check5
check_result5.status = CheckStatus.PASSING
check_result5.last_run = django_now
check_result5.retcode = 0
check_result5.execution_time = "4.0000"
check_result5.save()
for i in range(30):
check5_history = CheckHistory()
check5_history.check_id = check5.id
check5_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
check5_history.check_id = check5.pk
check5_history.agent_id = agent.agent_id
check5_history.x = django_now - djangotime.timedelta(minutes=i * 2)
if i == 10 or i == 18:
check5_history.y = 1
else:
@@ -480,28 +565,34 @@ class Command(BaseCommand):
check5_history.save()
check6 = Check()
check6.agent = agent
check6.check_type = "script"
check6.status = "passing"
check6.last_run = djangotime.now()
check6.check_type = CheckType.SCRIPT
check6.email_alert = random.choice([True, False])
check6.text_alert = random.choice([True, False])
check6.timeout = 120
check6.retcode = 0
check6.execution_time = "4.0000"
check6.script = check_net_aware
check6.save()
check_result6 = CheckResult()
check_result6.agent = agent
check_result6.assigned_check = check6
check_result6.status = CheckStatus.PASSING
check_result6.last_run = django_now
check_result6.retcode = 0
check_result6.execution_time = "4.0000"
check_result6.save()
for i in range(30):
check6_history = CheckHistory()
check6_history.check_id = check6.id
check6_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
check6_history.check_id = check6.pk
check6_history.agent_id = agent.agent_id
check6_history.x = django_now - djangotime.timedelta(minutes=i * 2)
check6_history.y = 0
check6_history.save()
nla_task = AutomatedTask()
nla_task.agent = agent
actions = [
{
@@ -516,15 +607,20 @@ class Command(BaseCommand):
nla_task.assigned_check = check6
nla_task.name = "Restart NLA"
nla_task.task_type = "checkfailure"
nla_task.win_task_name = "demotask123"
nla_task.execution_time = "1.8443"
nla_task.last_run = djangotime.now()
nla_task.stdout = "no stdout"
nla_task.retcode = 0
nla_task.sync_status = "synced"
nla_task.save()
nla_task_result = TaskResult()
nla_task_result.task = nla_task
nla_task_result.agent = agent
nla_task_result.execution_time = "1.8443"
nla_task_result.last_run = django_now
nla_task_result.stdout = "no stdout"
nla_task_result.retcode = 0
nla_task_result.sync_status = "synced"
nla_task_result.save()
spool_task = AutomatedTask()
spool_task.agent = agent
actions = [
{
@@ -538,22 +634,24 @@ class Command(BaseCommand):
spool_task.actions = actions
spool_task.name = "Clear the print spooler"
spool_task.task_type = "daily"
spool_task.run_time_date = djangotime.now() + djangotime.timedelta(
minutes=10
)
spool_task.expire_date = djangotime.now() + djangotime.timedelta(days=753)
spool_task.run_time_date = django_now + djangotime.timedelta(minutes=10)
spool_task.expire_date = django_now + djangotime.timedelta(days=753)
spool_task.daily_interval = 1
spool_task.weekly_interval = 1
spool_task.task_repetition_duration = "2h"
spool_task.task_repetition_interval = "25m"
spool_task.random_task_delay = "3m"
spool_task.win_task_name = "demospool123"
spool_task.last_run = djangotime.now()
spool_task.retcode = 0
spool_task.stdout = spooler_stdout
spool_task.sync_status = "synced"
spool_task.save()
spool_task_result = TaskResult()
spool_task_result.task = spool_task
spool_task_result.agent = agent
spool_task_result.last_run = django_now
spool_task_result.retcode = 0
spool_task_result.stdout = spooler_stdout
spool_task_result.sync_status = "synced"
spool_task_result.save()
tmp_dir_task = AutomatedTask()
tmp_dir_task.agent = agent
tmp_dir_task.name = "show temp dir files"
@@ -568,129 +666,147 @@ class Command(BaseCommand):
]
tmp_dir_task.actions = actions
tmp_dir_task.task_type = "manual"
tmp_dir_task.win_task_name = "demotemp"
tmp_dir_task.last_run = djangotime.now()
tmp_dir_task.stdout = temp_dir_stdout
tmp_dir_task.retcode = 0
tmp_dir_task.sync_status = "synced"
tmp_dir_task.save()
tmp_dir_task_result = TaskResult()
tmp_dir_task_result.task = tmp_dir_task
tmp_dir_task_result.agent = agent
tmp_dir_task_result.last_run = django_now
tmp_dir_task_result.stdout = temp_dir_stdout
tmp_dir_task_result.retcode = 0
tmp_dir_task_result.sync_status = "synced"
tmp_dir_task_result.save()
check7 = Check()
check7.agent = agent
check7.check_type = "script"
check7.status = "passing"
check7.last_run = djangotime.now()
check7.check_type = CheckType.SCRIPT
check7.email_alert = random.choice([True, False])
check7.text_alert = random.choice([True, False])
check7.timeout = 120
check7.retcode = 0
check7.execution_time = "3.1337"
check7.script = clear_spool
check7.stdout = spooler_stdout
check7.save()
check_result7 = CheckResult()
check_result7.assigned_check = check7
check_result7.agent = agent
check_result7.status = CheckStatus.PASSING
check_result7.last_run = django_now
check_result7.retcode = 0
check_result7.execution_time = "3.1337"
check_result7.stdout = spooler_stdout
check_result7.save()
for i in range(30):
check7_history = CheckHistory()
check7_history.check_id = check7.id
check7_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
check7_history.check_id = check7.pk
check7_history.agent_id = agent.agent_id
check7_history.x = django_now - djangotime.timedelta(minutes=i * 2)
check7_history.y = 0
check7_history.save()
check8 = Check()
check8.agent = agent
check8.check_type = "winsvc"
check8.status = "passing"
check8.last_run = djangotime.now()
check8.email_alert = random.choice([True, False])
check8.text_alert = random.choice([True, False])
check8.more_info = "Status RUNNING"
check8.fails_b4_alert = 4
check8.svc_name = "Spooler"
check8.svc_display_name = "Print Spooler"
check8.pass_if_start_pending = False
check8.restart_if_stopped = True
check8.save()
if agent.plat == "windows":
check8 = Check()
check8.agent = agent
check8.check_type = CheckType.WINSVC
check8.email_alert = random.choice([True, False])
check8.text_alert = random.choice([True, False])
check8.fails_b4_alert = 4
check8.svc_name = "Spooler"
check8.svc_display_name = "Print Spooler"
check8.pass_if_start_pending = False
check8.restart_if_stopped = True
check8.save()
for i in range(30):
check8_history = CheckHistory()
check8_history.check_id = check8.id
check8_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
if i == 10 or i == 18:
check8_history.y = 1
check8_history.results = "Status STOPPED"
check_result8 = CheckResult()
check_result8.assigned_check = check8
check_result8.agent = agent
check_result8.status = CheckStatus.PASSING
check_result8.last_run = django_now
check_result8.more_info = "Status RUNNING"
check_result8.save()
for i in range(30):
check8_history = CheckHistory()
check8_history.check_id = check8.pk
check8_history.agent_id = agent.agent_id
check8_history.x = django_now - djangotime.timedelta(minutes=i * 2)
if i == 10 or i == 18:
check8_history.y = 1
check8_history.results = "Status STOPPED"
else:
check8_history.y = 0
check8_history.results = "Status RUNNING"
check8_history.save()
check9 = Check()
check9.agent = agent
check9.check_type = CheckType.EVENT_LOG
check9.name = "unexpected shutdown"
check9.email_alert = random.choice([True, False])
check9.text_alert = random.choice([True, False])
check9.fails_b4_alert = 2
check9.log_name = EvtLogNames.APPLICATION
check9.event_id = 1001
check9.event_type = EvtLogTypes.INFO
check9.fail_when = EvtLogFailWhen.CONTAINS
check9.search_last_days = 30
check_result9 = CheckResult()
check_result9.agent = agent
check_result9.assigned_check = check9
check_result9.last_run = django_now
if site in sites5:
check_result9.extra_details = eventlog_check_fail_data
check_result9.status = CheckStatus.FAILING
else:
check8_history.y = 0
check8_history.results = "Status RUNNING"
check8_history.save()
check_result9.extra_details = {"log": []}
check_result9.status = CheckStatus.PASSING
check9 = Check()
check9.agent = agent
check9.check_type = "eventlog"
check9.name = "unexpected shutdown"
check9.save()
check_result9.save()
check9.last_run = djangotime.now()
check9.email_alert = random.choice([True, False])
check9.text_alert = random.choice([True, False])
check9.fails_b4_alert = 2
for i in range(30):
check9_history = CheckHistory()
check9_history.check_id = check9.pk
check9_history.agent_id = agent.agent_id
check9_history.x = django_now - djangotime.timedelta(minutes=i * 2)
if i == 10 or i == 18:
check9_history.y = 1
check9_history.results = "Events Found: 16"
else:
check9_history.y = 0
check9_history.results = "Events Found: 0"
check9_history.save()
if site in sites5:
check9.extra_details = eventlog_check_fail_data
check9.status = "failing"
else:
check9.extra_details = {"log": []}
check9.status = "passing"
pick = random.randint(1, 10)
check9.log_name = "Application"
check9.event_id = 1001
check9.event_type = "INFO"
check9.fail_when = "contains"
check9.search_last_days = 30
if pick == 5 or pick == 3:
check9.save()
reboot_time = django_now + djangotime.timedelta(
minutes=random.randint(1000, 500000)
)
date_obj = dt.datetime.strftime(reboot_time, "%Y-%m-%d %H:%M")
for i in range(30):
check9_history = CheckHistory()
check9_history.check_id = check9.id
check9_history.x = djangotime.now() - djangotime.timedelta(
minutes=i * 2
)
if i == 10 or i == 18:
check9_history.y = 1
check9_history.results = "Events Found: 16"
else:
check9_history.y = 0
check9_history.results = "Events Found: 0"
check9_history.save()
obj = dt.datetime.strptime(date_obj, "%Y-%m-%d %H:%M")
pick = random.randint(1, 10)
task_name = "TacticalRMM_SchedReboot_" + "".join(
random.choice(string.ascii_letters) for _ in range(10)
)
if pick == 5 or pick == 3:
reboot_time = djangotime.now() + djangotime.timedelta(
minutes=random.randint(1000, 500000)
)
date_obj = dt.datetime.strftime(reboot_time, "%Y-%m-%d %H:%M")
obj = dt.datetime.strptime(date_obj, "%Y-%m-%d %H:%M")
task_name = "TacticalRMM_SchedReboot_" + "".join(
random.choice(string.ascii_letters) for _ in range(10)
)
sched_reboot = PendingAction()
sched_reboot.agent = agent
sched_reboot.action_type = "schedreboot"
sched_reboot.details = {
"time": str(obj),
"taskname": task_name,
}
sched_reboot.save()
sched_reboot = PendingAction()
sched_reboot.agent = agent
sched_reboot.action_type = PAAction.SCHED_REBOOT
sched_reboot.details = {
"time": str(obj),
"taskname": task_name,
}
sched_reboot.save()
self.stdout.write(self.style.SUCCESS(f"Added agent # {count_agents + 1}"))
call_command("load_demo_scripts")
self.stdout.write("done")

View File

@@ -1,7 +1,8 @@
from agents.models import Agent
from django.conf import settings
from django.core.management.base import BaseCommand
from agents.models import Agent
class Command(BaseCommand):
help = "Shows online agents that are not on the latest version"

View File

@@ -1,10 +1,10 @@
from agents.models import Agent
from agents.tasks import send_agent_update_task
from core.models import CoreSettings
from django.conf import settings
from django.core.management.base import BaseCommand
from packaging import version as pyver
from agents.models import Agent
from agents.tasks import send_agent_update_task
from core.utils import get_core_settings
from tacticalrmm.constants import AGENT_DEFER
@@ -12,8 +12,8 @@ class Command(BaseCommand):
help = "Triggers an agent update task to run"
def handle(self, *args, **kwargs):
core = CoreSettings.objects.first()
if not core.agent_auto_update: # type: ignore
core = get_core_settings()
if not core.agent_auto_update:
return
q = Agent.objects.defer(*AGENT_DEFER).exclude(version=settings.LATEST_AGENT_VER)

View File

@@ -0,0 +1,26 @@
# Generated by Django 4.0.3 on 2022-04-07 17:28
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('clients', '0020_auto_20211226_0547'),
('agents', '0046_alter_agenthistory_command'),
]
operations = [
migrations.AlterField(
model_name='agent',
name='plat',
field=models.CharField(default='windows', max_length=255),
),
migrations.AlterField(
model_name='agent',
name='site',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.RESTRICT, related_name='agents', to='clients.site'),
preserve_default=False,
),
]

View File

@@ -0,0 +1,21 @@
# Generated by Django 4.0.3 on 2022-04-16 17:39
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0047_alter_agent_plat_alter_agent_site'),
]
operations = [
migrations.RemoveField(
model_name='agent',
name='has_patches_pending',
),
migrations.RemoveField(
model_name='agent',
name='pending_actions_count',
),
]

View File

@@ -0,0 +1,17 @@
# Generated by Django 4.0.3 on 2022-04-18 14:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0048_remove_agent_has_patches_pending_and_more'),
]
operations = [
migrations.AddIndex(
model_name='agent',
index=models.Index(fields=['monitoring_type'], name='agents_agen_monitor_df8816_idx'),
),
]

View File

@@ -0,0 +1,17 @@
# Generated by Django 4.0.4 on 2022-04-25 06:51
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0049_agent_agents_agen_monitor_df8816_idx'),
]
operations = [
migrations.RemoveField(
model_name='agent',
name='plat_release',
),
]

View File

@@ -1,38 +1,51 @@
import asyncio
import base64
import re
import time
from collections import Counter
from distutils.version import LooseVersion
from typing import Any
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast
import msgpack
import nats
import validators
from asgiref.sync import sync_to_async
from core.models import TZ_CHOICES, CoreSettings
from Crypto.Cipher import AES
from Crypto.Hash import SHA3_384
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.cache import cache
from django.db import models
from django.utils import timezone as djangotime
from logs.models import BaseAuditModel, DebugLog
from nats.errors import TimeoutError
from packaging import version as pyver
from core.models import TZ_CHOICES
from core.utils import get_core_settings, send_command_with_mesh
from logs.models import BaseAuditModel, DebugLog
from tacticalrmm.constants import ONLINE_AGENTS, CheckStatus, CheckType, DebugLogType
from tacticalrmm.models import PermissionQuerySet
if TYPE_CHECKING:
from alerts.models import Alert, AlertTemplate
from automation.models import Policy
from autotasks.models import AutomatedTask
from checks.models import Check
from clients.models import Client
from winupdate.models import WinUpdatePolicy
# type helpers
Disk = Union[Dict[str, Any], str]
class Agent(BaseAuditModel):
class Meta:
indexes = [
models.Index(fields=["monitoring_type"]),
]
objects = PermissionQuerySet.as_manager()
version = models.CharField(default="0.1.0", max_length=255)
operating_system = models.CharField(null=True, blank=True, max_length=255)
plat = models.CharField(max_length=255, null=True, blank=True)
plat = models.CharField(max_length=255, default="windows")
goarch = models.CharField(max_length=255, null=True, blank=True)
plat_release = models.CharField(max_length=255, null=True, blank=True)
hostname = models.CharField(max_length=255)
agent_id = models.CharField(max_length=200, unique=True)
last_seen = models.DateTimeField(null=True, blank=True)
@@ -61,8 +74,6 @@ class Agent(BaseAuditModel):
)
maintenance_mode = models.BooleanField(default=False)
block_policy_inheritance = models.BooleanField(default=False)
pending_actions_count = models.PositiveIntegerField(default=0)
has_patches_pending = models.BooleanField(default=False)
alert_template = models.ForeignKey(
"alerts.AlertTemplate",
related_name="agents",
@@ -73,9 +84,7 @@ class Agent(BaseAuditModel):
site = models.ForeignKey(
"clients.Site",
related_name="agents",
null=True,
blank=True,
on_delete=models.SET_NULL,
on_delete=models.RESTRICT,
)
policy = models.ForeignKey(
"automation.Policy",
@@ -85,49 +94,27 @@ class Agent(BaseAuditModel):
on_delete=models.SET_NULL,
)
def save(self, *args, **kwargs):
# get old agent if it exists
old_agent = Agent.objects.get(pk=self.pk) if self.pk else None
super(Agent, self).save(old_model=old_agent, *args, **kwargs)
# check if new agent has been created
# or check if policy has changed on agent
# or if site has changed on agent and if so generate policies
# or if agent was changed from server or workstation
if (
not old_agent
or (old_agent and old_agent.policy != self.policy)
or (old_agent.site != self.site)
or (old_agent.monitoring_type != self.monitoring_type)
or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
):
from automation.tasks import generate_agent_checks_task
generate_agent_checks_task.delay(agents=[self.pk], create_tasks=True)
def __str__(self):
def __str__(self) -> str:
return self.hostname
@property
def client(self):
def client(self) -> "Client":
return self.site.client
@property
def timezone(self):
def timezone(self) -> str:
# return the default timezone unless the timezone is explicitly set per agent
if self.time_zone is not None:
if self.time_zone:
return self.time_zone
else:
from core.models import CoreSettings
return CoreSettings.objects.first().default_time_zone # type: ignore
return get_core_settings().default_time_zone
@property
def is_posix(self):
def is_posix(self) -> bool:
return self.plat == "linux" or self.plat == "darwin"
@property
def arch(self):
def arch(self) -> Optional[str]:
if self.is_posix:
return self.goarch
@@ -139,7 +126,7 @@ class Agent(BaseAuditModel):
return None
@property
def winagent_dl(self):
def winagent_dl(self) -> Optional[str]:
if self.arch == "64":
return settings.DL_64
elif self.arch == "32":
@@ -147,7 +134,7 @@ class Agent(BaseAuditModel):
return None
@property
def win_inno_exe(self):
def win_inno_exe(self) -> Optional[str]:
if self.arch == "64":
return f"winagent-v{settings.LATEST_AGENT_VER}.exe"
elif self.arch == "32":
@@ -155,7 +142,7 @@ class Agent(BaseAuditModel):
return None
@property
def status(self):
def status(self) -> str:
offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time)
overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
@@ -170,21 +157,40 @@ class Agent(BaseAuditModel):
return "offline"
@property
def checks(self):
def checks(self) -> Dict[str, Any]:
from checks.models import CheckResult
total, passing, failing, warning, info = 0, 0, 0, 0, 0
if self.agentchecks.exists(): # type: ignore
for i in self.agentchecks.all(): # type: ignore
total += 1
if i.status == "passing":
passing += 1
elif i.status == "failing":
if i.alert_severity == "error":
failing += 1
elif i.alert_severity == "warning":
warning += 1
elif i.alert_severity == "info":
info += 1
for check in self.get_checks_with_policies(exclude_overridden=True):
total += 1
if (
not hasattr(check.check_result, "status")
or isinstance(check.check_result, CheckResult)
and check.check_result.status == CheckStatus.PASSING
):
passing += 1
elif (
isinstance(check.check_result, CheckResult)
and check.check_result.status == CheckStatus.FAILING
):
alert_severity = (
check.check_result.alert_severity
if check.check_type
in [
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]
else check.alert_severity
)
if alert_severity == "error":
failing += 1
elif alert_severity == "warning":
warning += 1
elif alert_severity == "info":
info += 1
ret = {
"total": total,
@@ -197,10 +203,10 @@ class Agent(BaseAuditModel):
return ret
@property
def cpu_model(self):
def cpu_model(self) -> List[str]:
if self.is_posix:
try:
return self.wmi_detail["cpus"]
return cast(List[str], self.wmi_detail["cpus"])
except:
return ["unknown cpu model"]
@@ -214,12 +220,13 @@ class Agent(BaseAuditModel):
return ["unknown cpu model"]
@property
def graphics(self):
def graphics(self) -> str:
if self.is_posix:
try:
if not self.wmi_detail["gpus"]:
return "No graphics cards"
return self.wmi_detail["gpus"]
return ", ".join(self.wmi_detail["gpus"])
except:
return "Error getting graphics cards"
@@ -243,7 +250,7 @@ class Agent(BaseAuditModel):
return "Graphics info requires agent v1.4.14"
@property
def local_ips(self):
def local_ips(self) -> str:
if self.is_posix:
try:
return ", ".join(self.wmi_detail["local_ips"])
@@ -270,15 +277,15 @@ class Agent(BaseAuditModel):
ret.append(ip)
if len(ret) == 1:
return ret[0]
return cast(str, ret[0])
else:
return ", ".join(ret) if ret else "error getting local ips"
@property
def make_model(self):
def make_model(self) -> str:
if self.is_posix:
try:
return self.wmi_detail["make_model"]
return cast(str, self.wmi_detail["make_model"])
except:
return "error getting make/model"
@@ -304,17 +311,17 @@ class Agent(BaseAuditModel):
try:
comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
return [x["Version"] for x in comp_sys_prod if "Version" in x][0]
return cast(str, [x["Version"] for x in comp_sys_prod if "Version" in x][0])
except:
pass
return "unknown make/model"
@property
def physical_disks(self):
def physical_disks(self) -> Sequence[Disk]:
if self.is_posix:
try:
return self.wmi_detail["disks"]
return cast(List[Disk], self.wmi_detail["disks"])
except:
return ["unknown disk"]
@@ -339,14 +346,90 @@ class Agent(BaseAuditModel):
except:
return ["unknown disk"]
def is_supported_script(self, platforms: list) -> bool:
@classmethod
def online_agents(cls, min_version: str = "") -> "List[Agent]":
if min_version:
return [
i
for i in cls.objects.only(*ONLINE_AGENTS)
if pyver.parse(i.version) >= pyver.parse(min_version)
and i.status == "online"
]
return [i for i in cls.objects.only(*ONLINE_AGENTS) if i.status == "online"]
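A minimal usage sketch of the new online_agents helper (the minimum version below is illustrative):

# iterate agents that are currently online and already on at least the given version
for agent in Agent.online_agents(min_version="2.0.0"):
    print(agent.hostname, agent.version)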
def is_supported_script(self, platforms: List[str]) -> bool:
return self.plat.lower() in platforms if platforms else True
def get_agent_policies(self):
def get_checks_with_policies(
self, exclude_overridden: bool = False
) -> "List[Check]":
if exclude_overridden:
checks = (
list(
check
for check in self.agentchecks.all()
if not check.overridden_by_policy
)
+ self.get_checks_from_policies()
)
else:
checks = list(self.agentchecks.all()) + self.get_checks_from_policies()
return self.add_check_results(checks)
def get_tasks_with_policies(self) -> "List[AutomatedTask]":
tasks = list(self.autotasks.all()) + self.get_tasks_from_policies()
return self.add_task_results(tasks)
def add_task_results(self, tasks: "List[AutomatedTask]") -> "List[AutomatedTask]":
results = self.taskresults.all() # type: ignore
for task in tasks:
for result in results:
if result.task.id == task.pk:
task.task_result = result
break
return tasks
def add_check_results(self, checks: "List[Check]") -> "List[Check]":
results = self.checkresults.all() # type: ignore
for check in checks:
for result in results:
if result.assigned_check.id == check.pk:
check.check_result = result
break
return checks
def get_agent_policies(self) -> "Dict[str, Optional[Policy]]":
from checks.models import Check
site_policy = getattr(self.site, f"{self.monitoring_type}_policy", None)
client_policy = getattr(self.client, f"{self.monitoring_type}_policy", None)
default_policy = getattr(
CoreSettings.objects.first(), f"{self.monitoring_type}_policy", None
get_core_settings(), f"{self.monitoring_type}_policy", None
)
# prefetch excluded objects on policies only if policy is not None
models.prefetch_related_objects(
[
policy
for policy in [self.policy, site_policy, client_policy, default_policy]
if policy
],
"excluded_agents",
"excluded_sites",
"excluded_clients",
models.Prefetch(
"policychecks", queryset=Check.objects.select_related("script")
),
"autotasks",
)
return {
@@ -373,21 +456,18 @@ class Agent(BaseAuditModel):
def check_run_interval(self) -> int:
interval = self.check_interval
# determine if any agent checks have a custom interval and set the lowest interval
for check in self.agentchecks.filter(overriden_by_policy=False): # type: ignore
for check in self.get_checks_with_policies():
if check.run_interval and check.run_interval < interval:
# don't allow check runs less than 15s
if check.run_interval < 15:
interval = 15
else:
interval = check.run_interval
interval = 15 if check.run_interval < 15 else check.run_interval
return interval
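A standalone sketch of the tightened interval logic above, with illustrative numbers:

def effective_interval(agent_interval: int, run_intervals: list[int]) -> int:
    # mirrors check_run_interval: take the lowest custom interval, floored at 15s
    interval = agent_interval
    for run_interval in run_intervals:
        if run_interval and run_interval < interval:
            interval = 15 if run_interval < 15 else run_interval
    return interval

assert effective_interval(120, [60, 5]) == 15  # 5s is clamped up to the 15s floor
assert effective_interval(120, [60, 0]) == 60  # a falsy run_interval is ignored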
def run_script(
self,
scriptpk: int,
args: list[str] = [],
args: List[str] = [],
timeout: int = 120,
full: bool = False,
wait: bool = False,
@@ -424,15 +504,7 @@ class Agent(BaseAuditModel):
if r == "pong":
running_agent = self
else:
online = [
agent
for agent in Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
if agent.status == "online"
]
for agent in online:
for agent in Agent.online_agents():
r = asyncio.run(agent.nats_cmd(nats_ping, timeout=1))
if r == "pong":
running_agent = agent
@@ -449,49 +521,44 @@ class Agent(BaseAuditModel):
return "ok"
# auto approves updates
def approve_updates(self):
def approve_updates(self) -> None:
patch_policy = self.get_patch_policy()
updates = list()
severity_list = list()
if patch_policy.critical == "approve":
updates += self.winupdates.filter( # type: ignore
severity="Critical", installed=False
).exclude(action="approve")
severity_list.append("Critical")
if patch_policy.important == "approve":
updates += self.winupdates.filter( # type: ignore
severity="Important", installed=False
).exclude(action="approve")
severity_list.append("Important")
if patch_policy.moderate == "approve":
updates += self.winupdates.filter( # type: ignore
severity="Moderate", installed=False
).exclude(action="approve")
severity_list.append("Moderate")
if patch_policy.low == "approve":
updates += self.winupdates.filter(severity="Low", installed=False).exclude( # type: ignore
action="approve"
)
severity_list.append("Low")
if patch_policy.other == "approve":
updates += self.winupdates.filter(severity="", installed=False).exclude( # type: ignore
action="approve"
)
severity_list.append("")
for update in updates:
update.action = "approve"
update.save(update_fields=["action"])
self.winupdates.filter(severity__in=severity_list, installed=False).exclude(
action="approve"
).update(action="approve")
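The per-severity loops above collapse into one queryset update; a minimal sketch of the pattern, assuming an Agent instance named agent:

# one UPDATE ... WHERE severity IN (...) AND installed is false, skipping rows already approved
agent.winupdates.filter(
    severity__in=["Critical", "Important"], installed=False
).exclude(action="approve").update(action="approve")

Unlike the removed per-row save() loop, .update() issues a single SQL statement and does not call each WinUpdate's save() or fire model signals.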
# returns agent policy merged with a client or site specific policy
def get_patch_policy(self):
def get_patch_policy(self) -> "WinUpdatePolicy":
from winupdate.models import WinUpdatePolicy
# check if site has a patch policy and if so use it
patch_policy = None
agent_policy = self.winupdatepolicy.first() # type: ignore
agent_policy = self.winupdatepolicy.first()
if not agent_policy:
agent_policy = WinUpdatePolicy.objects.create(agent=self)
policies = self.get_agent_policies()
processed_policies = list()
processed_policies: List[int] = list()
for _, policy in policies.items():
if (
policy
@@ -545,12 +612,13 @@ class Agent(BaseAuditModel):
# sets alert template assigned in the following order: policy, site, client, global
# sets None if nothing is found
def set_alert_template(self):
core = CoreSettings.objects.first()
def set_alert_template(self) -> "Optional[AlertTemplate]":
core = get_core_settings()
policies = self.get_agent_policies()
# loop through all policies applied to agent and return an alert_template if found
processed_policies = list()
processed_policies: List[int] = list()
for key, policy in policies.items():
# default alert_template will override a default policy with alert template applied
if (
@@ -598,49 +666,68 @@ class Agent(BaseAuditModel):
return None
def generate_checks_from_policies(self):
def get_or_create_alert_if_needed(
self, alert_template: "Optional[AlertTemplate]"
) -> "Optional[Alert]":
from alerts.models import Alert
return Alert.create_or_return_availability_alert(
self, skip_create=not self.should_create_alert(alert_template)
)
def get_checks_from_policies(self) -> "List[Check]":
from automation.models import Policy
# Clear agent checks that have overriden_by_policy set
self.agentchecks.update(overriden_by_policy=False) # type: ignore
# check if agent is blocking inheritance
if self.block_policy_inheritance or self.agentchecks.exists():
cache_key = f"agent_{self.agent_id}_checks"
# Generate checks based on policies
Policy.generate_policy_checks(self)
elif self.policy:
cache_key = f"site_{self.monitoring_type}_{self.site_id}_policy_{self.policy_id}_checks"
def generate_tasks_from_policies(self):
else:
cache_key = f"site_{self.monitoring_type}_{self.site_id}_checks"
cached_checks = cache.get(cache_key)
if isinstance(cached_checks, list):
return cached_checks
else:
# clear agent checks that have overridden_by_policy set
self.agentchecks.update(overridden_by_policy=False) # type: ignore
# get agent checks based on policies
checks = Policy.get_policy_checks(self)
cache.set(cache_key, checks, 600)
return checks
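For reference, the cache introduced here is keyed per agent, per site+policy, or per site, with a 600-second TTL; a minimal sketch of the key selection, mirroring the branches above:

def checks_cache_key(agent) -> str:
    # agent-level key when inheritance is blocked or the agent has its own checks,
    # otherwise a shared site/policy-level key so many agents hit the same entry
    if agent.block_policy_inheritance or agent.agentchecks.exists():
        return f"agent_{agent.agent_id}_checks"
    if agent.policy:
        return f"site_{agent.monitoring_type}_{agent.site_id}_policy_{agent.policy_id}_checks"
    return f"site_{agent.monitoring_type}_{agent.site_id}_checks"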
def get_tasks_from_policies(self) -> "List[AutomatedTask]":
from automation.models import Policy
# Generate tasks based on policies
Policy.generate_policy_tasks(self)
# check if agent is blocking inheritance
if self.block_policy_inheritance:
cache_key = f"agent_{self.agent_id}_tasks"
# https://github.com/Ylianst/MeshCentral/issues/59#issuecomment-521965347
def get_login_token(self, key, user, action=3):
try:
key = bytes.fromhex(key)
key1 = key[0:48]
key2 = key[48:]
msg = '{{"a":{}, "u":"{}","time":{}}}'.format(
action, user.lower(), int(time.time())
)
iv = get_random_bytes(16)
elif self.policy:
cache_key = f"site_{self.monitoring_type}_{self.site_id}_policy_{self.policy_id}_tasks"
# sha
h = SHA3_384.new()
h.update(key1)
hashed_msg = h.digest() + msg.encode()
else:
cache_key = f"site_{self.monitoring_type}_{self.site_id}_tasks"
# aes
cipher = AES.new(key2, AES.MODE_CBC, iv)
msg = cipher.encrypt(pad(hashed_msg, 16))
cached_tasks = cache.get(cache_key)
if isinstance(cached_tasks, list):
return cached_tasks
else:
# get agent tasks based on policies
tasks = Policy.get_policy_tasks(self)
cache.set(f"site_{self.site_id}_tasks", tasks, 600)
return tasks
return base64.b64encode(iv + msg, altchars=b"@$").decode("utf-8")
except Exception:
return "err"
def _do_nats_debug(self, agent: "Agent", message: str) -> None:
DebugLog.error(agent=agent, log_type=DebugLogType.AGENT_ISSUES, message=message)
def _do_nats_debug(self, agent, message):
DebugLog.error(agent=agent, log_type="agent_issues", message=message)
async def nats_cmd(self, data: dict, timeout: int = 30, wait: bool = True):
async def nats_cmd(
self, data: Dict[Any, Any], timeout: int = 30, wait: bool = True
) -> Any:
options = {
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"user": "tacticalrmm",
@@ -663,7 +750,7 @@ class Agent(BaseAuditModel):
ret = "timeout"
else:
try:
ret = msgpack.loads(msg.data) # type: ignore
ret = msgpack.loads(msg.data)
except Exception as e:
ret = str(e)
await sync_to_async(self._do_nats_debug, thread_sensitive=False)(
@@ -677,27 +764,59 @@ class Agent(BaseAuditModel):
await nc.flush()
await nc.close()
def recover(self, mode: str, mesh_uri: str, wait: bool = True) -> tuple[str, bool]:
"""
Return type: tuple(message: str, error: bool)
"""
if mode == "tacagent":
if self.is_posix:
cmd = "systemctl restart tacticalagent.service"
shell = 3
else:
cmd = "net stop tacticalrmm & taskkill /F /IM tacticalrmm.exe & net start tacticalrmm"
shell = 1
asyncio.run(
send_command_with_mesh(cmd, mesh_uri, self.mesh_node_id, shell, 0)
)
return ("ok", False)
elif mode == "mesh":
data = {"func": "recover", "payload": {"mode": mode}}
if wait:
r = asyncio.run(self.nats_cmd(data, timeout=20))
if r == "ok":
return ("ok", False)
else:
return (str(r), True)
else:
asyncio.run(self.nats_cmd(data, timeout=20, wait=False))
return ("ok", False)
return ("invalid", True)
@staticmethod
def serialize(agent):
def serialize(agent: "Agent") -> Dict[str, Any]:
# serializes the agent and returns json
from .serializers import AgentAuditSerializer
return AgentAuditSerializer(agent).data
def delete_superseded_updates(self):
def delete_superseded_updates(self) -> None:
try:
pks = [] # list of pks to delete
kbs = list(self.winupdates.values_list("kb", flat=True)) # type: ignore
kbs = list(self.winupdates.values_list("kb", flat=True))
d = Counter(kbs)
dupes = [k for k, v in d.items() if v > 1]
for dupe in dupes:
titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True) # type: ignore
titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)
# extract the version from the title and sort from oldest to newest
# skip if no version info is available, since there is nothing to parse
try:
vers = [
re.search(r"\(Version(.*?)\)", i).group(1).strip() # type: ignore
re.search(r"\(Version(.*?)\)", i).group(1).strip()
for i in titles
]
sorted_vers = sorted(vers, key=LooseVersion)
@@ -705,16 +824,18 @@ class Agent(BaseAuditModel):
continue
# append all but the latest version to our list of pks to delete
for ver in sorted_vers[:-1]:
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver) # type: ignore
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)
pks.append(q.first().pk)
pks = list(set(pks))
self.winupdates.filter(pk__in=pks).delete() # type: ignore
self.winupdates.filter(pk__in=pks).delete()
except:
pass
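For reference, the version extraction in delete_superseded_updates behaves like this on illustrative titles:

import re
from distutils.version import LooseVersion  # same import used by the model

titles = [
    "Feature update to Windows 10 (Version 21H2)",  # illustrative titles
    "Feature update to Windows 10 (Version 20H2)",
]
vers = [re.search(r"\(Version(.*?)\)", t).group(1).strip() for t in titles]
sorted_vers = sorted(vers, key=LooseVersion)
print(sorted_vers)  # ['20H2', '21H2'] -- everything but the last entry is superseded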
def should_create_alert(self, alert_template=None):
return (
def should_create_alert(
self, alert_template: "Optional[AlertTemplate]" = None
) -> bool:
return bool(
self.overdue_dashboard_alert
or self.overdue_email_alert
or self.overdue_text_alert
@@ -728,11 +849,10 @@ class Agent(BaseAuditModel):
)
)
def send_outage_email(self):
from core.models import CoreSettings
def send_outage_email(self) -> None:
CORE = get_core_settings()
CORE = CoreSettings.objects.first()
CORE.send_mail( # type: ignore
CORE.send_mail(
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
(
f"Data has not been received from client {self.client.name}, "
@@ -743,11 +863,10 @@ class Agent(BaseAuditModel):
alert_template=self.alert_template,
)
def send_recovery_email(self):
from core.models import CoreSettings
def send_recovery_email(self) -> None:
CORE = get_core_settings()
CORE = CoreSettings.objects.first()
CORE.send_mail( # type: ignore
CORE.send_mail(
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
(
f"Data has been received from client {self.client.name}, "
@@ -758,20 +877,18 @@ class Agent(BaseAuditModel):
alert_template=self.alert_template,
)
def send_outage_sms(self):
from core.models import CoreSettings
def send_outage_sms(self) -> None:
CORE = get_core_settings()
CORE = CoreSettings.objects.first()
CORE.send_sms( # type: ignore
CORE.send_sms(
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
alert_template=self.alert_template,
)
def send_recovery_sms(self):
from core.models import CoreSettings
def send_recovery_sms(self) -> None:
CORE = get_core_settings()
CORE = CoreSettings.objects.first()
CORE.send_sms( # type: ignore
CORE.send_sms(
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
alert_template=self.alert_template,
)
@@ -795,7 +912,7 @@ class Note(models.Model):
note = models.TextField(null=True, blank=True)
entry_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
def __str__(self) -> str:
return self.agent.hostname
@@ -823,26 +940,26 @@ class AgentCustomField(models.Model):
default=list,
)
def __str__(self):
def __str__(self) -> str:
return self.field.name
@property
def value(self):
def value(self) -> Union[List[Any], bool, str]:
if self.field.type == "multiple":
return self.multiple_value
return cast(List[str], self.multiple_value)
elif self.field.type == "checkbox":
return self.bool_value
else:
return self.string_value
return cast(str, self.string_value)
def save_to_field(self, value):
def save_to_field(self, value: Union[List[Any], bool, str]) -> None:
if self.field.type in [
"text",
"number",
"single",
"datetime",
]:
self.string_value = value
self.string_value = cast(str, value)
self.save()
elif self.field.type == "multiple":
self.multiple_value = value.split(",")
@@ -888,5 +1005,5 @@ class AgentHistory(models.Model):
)
script_results = models.JSONField(null=True, blank=True)
def __str__(self):
def __str__(self) -> str:
return f"{self.agent.hostname} - {self.type}"

View File

@@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
class AgentPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
if "agent_id" in view.kwargs.keys():
return _has_perm(r, "can_list_agents") and _has_perm_on_agent(
@@ -26,73 +26,73 @@ class AgentPerms(permissions.BasePermission):
class RecoverAgentPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_recover_agents") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class MeshPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_use_mesh") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class UpdateAgentPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_update_agents")
class PingAgentPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_ping_agents") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class ManageProcPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_manage_procs") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class EvtLogPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_view_eventlogs") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class SendCMDPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_send_cmd") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class RebootAgentPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_reboot_agents") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class InstallAgentPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_install_agents")
class RunScriptPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_run_scripts") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
class AgentNotesPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
# permissions for GET /agents/notes/ endpoint
if r.method == "GET":
@@ -109,12 +109,12 @@ class AgentNotesPerms(permissions.BasePermission):
class RunBulkPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_run_bulk")
class AgentHistoryPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if "agent_id" in view.kwargs.keys():
return _has_perm(r, "can_list_agent_history") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]

View File

@@ -1,5 +1,6 @@
import pytz
from rest_framework import serializers
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent, AgentCustomField, AgentHistory, Note
@@ -79,13 +80,15 @@ class AgentSerializer(serializers.ModelSerializer):
class AgentTableSerializer(serializers.ModelSerializer):
status = serializers.ReadOnlyField()
checks = serializers.ReadOnlyField()
last_seen = serializers.SerializerMethodField()
client_name = serializers.ReadOnlyField(source="client.name")
site_name = serializers.ReadOnlyField(source="site.name")
logged_username = serializers.SerializerMethodField()
italic = serializers.SerializerMethodField()
policy = serializers.ReadOnlyField(source="policy.id")
alert_template = serializers.SerializerMethodField()
last_seen = serializers.ReadOnlyField()
pending_actions_count = serializers.ReadOnlyField()
has_patches_pending = serializers.ReadOnlyField()
def get_alert_template(self, obj):
@@ -99,14 +102,6 @@ class AgentTableSerializer(serializers.ModelSerializer):
"always_alert": obj.alert_template.agent_always_alert,
}
def get_last_seen(self, obj) -> str:
if obj.time_zone is not None:
agent_tz = pytz.timezone(obj.time_zone)
else:
agent_tz = self.context["default_tz"]
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M")
def get_logged_username(self, obj) -> str:
if obj.logged_in_username == "None" and obj.status == "online":
return obj.last_logged_in_user
@@ -129,7 +124,6 @@ class AgentTableSerializer(serializers.ModelSerializer):
"monitoring_type",
"description",
"needs_reboot",
"has_patches_pending",
"pending_actions_count",
"status",
"overdue_text_alert",
@@ -145,6 +139,7 @@ class AgentTableSerializer(serializers.ModelSerializer):
"block_policy_inheritance",
"plat",
"goarch",
"has_patches_pending",
]
depth = 2
@@ -181,17 +176,12 @@ class AgentNoteSerializer(serializers.ModelSerializer):
class AgentHistorySerializer(serializers.ModelSerializer):
time = serializers.SerializerMethodField(read_only=True)
script_name = serializers.ReadOnlyField(source="script.name")
class Meta:
model = AgentHistory
fields = "__all__"
def get_time(self, history):
tz = self.context["default_tz"]
return history.time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")
class AgentAuditSerializer(serializers.ModelSerializer):
class Meta:

View File

@@ -2,18 +2,19 @@ import asyncio
import datetime as dt
import random
from time import sleep
from typing import Union
from typing import Optional
from django.conf import settings
from django.utils import timezone as djangotime
from packaging import version as pyver
from agents.models import Agent
from agents.utils import get_agent_url
from core.models import CoreSettings
from django.conf import settings
from django.utils import timezone as djangotime
from core.utils import get_core_settings
from logs.models import DebugLog, PendingAction
from packaging import version as pyver
from scripts.models import Script
from tacticalrmm.celery import app
from tacticalrmm.constants import CheckStatus, DebugLogType, PAAction, PAStatus
def agent_update(agent_id: str, force: bool = False) -> str:
@@ -27,7 +28,7 @@ def agent_update(agent_id: str, force: bool = False) -> str:
if agent.arch is None:
DebugLog.warning(
agent=agent,
log_type="agent_issues",
log_type=DebugLogType.AGENT_ISSUES,
message=f"Unable to determine arch on {agent.hostname}({agent.agent_id}). Skipping agent update.",
)
return "noarch"
@@ -38,15 +39,15 @@ def agent_update(agent_id: str, force: bool = False) -> str:
if not force:
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING
).exists():
agent.pendingactions.filter(
action_type="agentupdate", status="pending"
action_type=PAAction.AGENT_UPDATE, status=PAStatus.PENDING
).delete()
PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
action_type=PAAction.AGENT_UPDATE,
details={
"url": url,
"version": version,
@@ -68,28 +69,26 @@ def agent_update(agent_id: str, force: bool = False) -> str:
@app.task
def force_code_sign(agent_ids: list[str]) -> None:
chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
chunks = (agent_ids[i : i + 70] for i in range(0, len(agent_ids), 70))
for chunk in chunks:
for agent_id in chunk:
agent_update(agent_id=agent_id, force=True)
sleep(0.05)
sleep(4)
sleep(2)
@app.task
def send_agent_update_task(agent_ids: list[str]) -> None:
chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
chunks = (agent_ids[i : i + 70] for i in range(0, len(agent_ids), 70))
for chunk in chunks:
for agent_id in chunk:
agent_update(agent_id)
sleep(0.05)
sleep(4)
sleep(2)
@app.task
def auto_self_agent_update_task() -> None:
core = CoreSettings.objects.first()
if not core.agent_auto_update: # type:ignore
core = get_core_settings()
if not core.agent_auto_update:
return
q = Agent.objects.only("agent_id", "version")
@@ -99,22 +98,24 @@ def auto_self_agent_update_task() -> None:
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
]
chunks = (agent_ids[i : i + 30] for i in range(0, len(agent_ids), 30))
chunks = (agent_ids[i : i + 70] for i in range(0, len(agent_ids), 70))
for chunk in chunks:
for agent_id in chunk:
agent_update(agent_id)
sleep(0.05)
sleep(4)
sleep(2)
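The update tasks now walk agents in chunks of 70 with a short pause between chunks; a worked example of the chunking generator with illustrative ids:

agent_ids = [f"agent-{n}" for n in range(150)]  # illustrative ids
chunks = (agent_ids[i : i + 70] for i in range(0, len(agent_ids), 70))
print([len(chunk) for chunk in chunks])  # [70, 70, 10]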
@app.task
def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str:
def agent_outage_email_task(pk: int, alert_interval: Optional[float] = None) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
if not alert.email_sent:
sleep(random.randint(1, 15))
sleep(random.randint(1, 5))
alert.agent.send_outage_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
@@ -123,7 +124,7 @@ def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None)
# send an email only if the last email sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.email_sent < delta:
sleep(random.randint(1, 10))
sleep(random.randint(1, 5))
alert.agent.send_outage_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
@@ -135,8 +136,13 @@ def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None)
def agent_recovery_email_task(pk: int) -> str:
from alerts.models import Alert
sleep(random.randint(1, 15))
alert = Alert.objects.get(pk=pk)
sleep(random.randint(1, 5))
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
alert.agent.send_recovery_email()
alert.resolved_email_sent = djangotime.now()
alert.save(update_fields=["resolved_email_sent"])
@@ -145,13 +151,16 @@ def agent_recovery_email_task(pk: int) -> str:
@app.task
def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str:
def agent_outage_sms_task(pk: int, alert_interval: Optional[float] = None) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
if not alert.sms_sent:
sleep(random.randint(1, 15))
sleep(random.randint(1, 3))
alert.agent.send_outage_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
@@ -160,7 +169,7 @@ def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) ->
# send an sms only if the last sms sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.sms_sent < delta:
sleep(random.randint(1, 10))
sleep(random.randint(1, 3))
alert.agent.send_outage_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
@@ -173,7 +182,11 @@ def agent_recovery_sms_task(pk: int) -> str:
from alerts.models import Alert
sleep(random.randint(1, 3))
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
alert.agent.send_recovery_sms()
alert.resolved_sms_sent = djangotime.now()
alert.save(update_fields=["resolved_sms_sent"])
@@ -223,12 +236,12 @@ def run_script_email_results_task(
if r == "timeout":
DebugLog.error(
agent=agent,
log_type="scripting",
log_type=DebugLogType.SCRIPTING,
message=f"{agent.hostname}({agent.pk}) timed out running script.",
)
return
CORE = CoreSettings.objects.first()
CORE = get_core_settings()
subject = f"{agent.hostname} {script.name} Results"
exec_time = "{:.4f}".format(r["execution_time"])
body = (
@@ -241,25 +254,21 @@ def run_script_email_results_task(
msg = EmailMessage()
msg["Subject"] = subject
msg["From"] = CORE.smtp_from_email # type:ignore
msg["From"] = CORE.smtp_from_email
if emails:
msg["To"] = ", ".join(emails)
else:
msg["To"] = ", ".join(CORE.email_alert_recipients) # type:ignore
msg["To"] = ", ".join(CORE.email_alert_recipients)
msg.set_content(body)
try:
with smtplib.SMTP(
CORE.smtp_host, CORE.smtp_port, timeout=20 # type:ignore
) as server: # type:ignore
if CORE.smtp_requires_auth: # type:ignore
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
if CORE.smtp_requires_auth:
server.ehlo()
server.starttls()
server.login(
CORE.smtp_host_user, CORE.smtp_host_password # type:ignore
) # type:ignore
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
server.send_message(msg)
server.quit()
else:
@@ -271,18 +280,22 @@ def run_script_email_results_task(
@app.task
def clear_faults_task(older_than_days: int) -> None:
from alerts.models import Alert
# https://github.com/amidaware/tacticalrmm/issues/484
agents = Agent.objects.exclude(last_seen__isnull=True).filter(
last_seen__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
)
for agent in agents:
if agent.agentchecks.exists():
for check in agent.agentchecks.all():
# reset check status
check.status = "passing"
check.save(update_fields=["status"])
if check.alert.filter(resolved=False).exists():
check.alert.get(resolved=False).resolve()
for check in agent.get_checks_with_policies():
# reset check status
if check.check_result:
check.check_result.status = CheckStatus.PASSING
check.check_result.save(update_fields=["status"])
if check.alert.filter(agent=agent, resolved=False).exists():
alert = Alert.create_or_return_check_alert(check, agent=agent)
if alert:
alert.resolve()
# reset overdue alerts
agent.overdue_email_alert = False

View File

@@ -1,51 +1,49 @@
import json
import os
from itertools import cycle
from typing import TYPE_CHECKING
from unittest.mock import patch
import pytz
from django.conf import settings
from django.test import modify_settings
from django.utils import timezone as djangotime
from logs.models import PendingAction
from model_bakery import baker
from packaging import version as pyver
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer
from tacticalrmm.test import TacticalTestCase
from .models import Agent, AgentCustomField, AgentHistory, Note
from .serializers import (
from agents.models import Agent, AgentCustomField, AgentHistory, Note
from agents.serializers import (
AgentHistorySerializer,
AgentHostnameSerializer,
AgentNoteSerializer,
AgentSerializer,
)
from .tasks import auto_self_agent_update_task
from agents.tasks import auto_self_agent_update_task
from logs.models import PendingAction
from tacticalrmm.constants import EvtLogNames, PAAction, PAStatus
from tacticalrmm.test import TacticalTestCase
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer
if TYPE_CHECKING:
from clients.models import Client, Site
base_url = "/agents"
@modify_settings(
MIDDLEWARE={
"remove": "tacticalrmm.middleware.LinuxMiddleware",
}
)
class TestAgentsList(TacticalTestCase):
def setUp(self):
def setUp(self) -> None:
self.authenticate()
self.setup_coresettings()
def test_get_agents(self):
def test_get_agents(self) -> None:
url = f"{base_url}/"
# 36 total agents
company1 = baker.make("clients.Client")
company2 = baker.make("clients.Client")
site1 = baker.make("clients.Site", client=company1)
site2 = baker.make("clients.Site", client=company1)
site3 = baker.make("clients.Site", client=company2)
company1: "Client" = baker.make("clients.Client")
company2: "Client" = baker.make("clients.Client")
site1: "Site" = baker.make("clients.Site", client=company1)
site2: "Site" = baker.make("clients.Site", client=company1)
site3: "Site" = baker.make("clients.Site", client=company2)
baker.make_recipe(
"agents.online_agent", site=site1, monitoring_type="server", _quantity=15
@@ -72,36 +70,31 @@ class TestAgentsList(TacticalTestCase):
# test all agents
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data), 36) # type: ignore
self.assertEqual(len(r.data), 36)
# test client1
r = self.client.get(f"{url}?client={company1.pk}", format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data), 25) # type: ignore
self.assertEqual(len(r.data), 25)
# test site3
r = self.client.get(f"{url}?site={site3.pk}", format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data), 11) # type: ignore
self.assertEqual(len(r.data), 11)
# test with no details
r = self.client.get(f"{url}?site={site3.pk}&detail=false", format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data), 11) # type: ignore
self.assertEqual(len(r.data), 11)
# make sure data is returned with the AgentHostnameSerializer
agents = Agent.objects.filter(site=site3)
serializer = AgentHostnameSerializer(agents, many=True)
self.assertEqual(r.data, serializer.data) # type: ignore
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
@modify_settings(
MIDDLEWARE={
"remove": "tacticalrmm.middleware.LinuxMiddleware",
}
)
class TestAgentViews(TacticalTestCase):
def setUp(self):
self.authenticate()
@@ -129,7 +122,7 @@ class TestAgentViews(TacticalTestCase):
url = f"{base_url}/{self.agent.agent_id}/"
data = {
"site": site.id, # type: ignore
"site": site.pk,
"monitoring_type": "workstation",
"description": "asjdk234andasd",
"offline_time": 4,
@@ -160,7 +153,7 @@ class TestAgentViews(TacticalTestCase):
agent = Agent.objects.get(pk=self.agent.pk)
data = AgentSerializer(agent).data
self.assertEqual(data["site"], site.id) # type: ignore
self.assertEqual(data["site"], site.pk)
policy = WinUpdatePolicy.objects.get(agent=self.agent)
data = WinUpdatePolicySerializer(policy).data
@@ -169,9 +162,9 @@ class TestAgentViews(TacticalTestCase):
# test adding custom fields
field = baker.make("core.CustomField", model="agent", type="number")
data = {
"site": site.id, # type: ignore
"site": site.pk,
"description": "asjdk234andasd",
"custom_fields": [{"field": field.id, "string_value": "123"}], # type: ignore
"custom_fields": [{"field": field.pk, "string_value": "123"}],
}
r = self.client.put(url, data, format="json")
@@ -182,9 +175,9 @@ class TestAgentViews(TacticalTestCase):
# test edit custom field
data = {
"site": site.id, # type: ignore
"site": site.pk,
"description": "asjdk234andasd",
"custom_fields": [{"field": field.id, "string_value": "456"}], # type: ignore
"custom_fields": [{"field": field.pk, "string_value": "456"}],
}
r = self.client.put(url, data, format="json")
@@ -242,7 +235,10 @@ class TestAgentViews(TacticalTestCase):
def test_get_agent_versions(self):
url = "/agents/versions/"
r = self.client.get(url)
with self.assertNumQueries(1):
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
assert any(i["hostname"] == self.agent.hostname for i in r.json()["agents"])
@@ -368,7 +364,7 @@ class TestAgentViews(TacticalTestCase):
"func": "eventlog",
"timeout": 30,
"payload": {
"logname": "Application",
"logname": EvtLogNames.APPLICATION,
"days": str(22),
},
},
@@ -383,7 +379,7 @@ class TestAgentViews(TacticalTestCase):
"func": "eventlog",
"timeout": 180,
"payload": {
"logname": "Security",
"logname": EvtLogNames.SECURITY,
"days": str(6),
},
},
@@ -423,7 +419,7 @@ class TestAgentViews(TacticalTestCase):
mock_ret.return_value = "nt authority\\system"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertIsInstance(r.data, str) # type: ignore
self.assertIsInstance(r.data, str)
mock_ret.return_value = "timeout"
r = self.client.post(url, data, format="json")
@@ -436,14 +432,14 @@ class TestAgentViews(TacticalTestCase):
url = f"{base_url}/{self.agent.agent_id}/reboot/"
data = {
"datetime": "2025-08-29 18:41",
"datetime": "2025-08-29T18:41:02",
}
nats_cmd.return_value = "ok"
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM") # type: ignore
self.assertEqual(r.data["agent"], self.agent.hostname) # type: ignore
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
self.assertEqual(r.data["agent"], self.agent.hostname)
nats_data = {
"func": "schedtask",
@@ -454,7 +450,7 @@ class TestAgentViews(TacticalTestCase):
"start_when_available": False,
"multiple_instances": 2,
"trigger": "runonce",
"name": r.data["task_name"], # type: ignore
"name": r.data["task_name"],
"start_year": 2025,
"start_month": 8,
"start_day": 29,
@@ -479,7 +475,7 @@ class TestAgentViews(TacticalTestCase):
r = self.client.patch(url, data_invalid, format="json")
self.assertEqual(r.status_code, 400)
self.assertEqual(r.data, "Invalid date") # type: ignore
self.assertEqual(r.data, "Invalid date")
self.check_not_authenticated("patch", url)
@@ -488,8 +484,8 @@ class TestAgentViews(TacticalTestCase):
site = baker.make("clients.Site")
data = {
"client": site.client.id, # type: ignore
"site": site.id, # type: ignore
"client": site.client.pk,
"site": site.pk,
"arch": "64",
"expires": 23,
"installMethod": "manual",
@@ -519,7 +515,7 @@ class TestAgentViews(TacticalTestCase):
self.check_not_authenticated("post", url)
@patch("agents.models.Agent.get_login_token")
@patch("meshctrl.utils.get_login_token")
def test_meshcentral_tabs(self, mock_token):
url = f"{base_url}/{self.agent.agent_id}/meshcentral/"
mock_token.return_value = "askjh1k238uasdhk487234jadhsajksdhasd"
@@ -529,28 +525,24 @@ class TestAgentViews(TacticalTestCase):
# TODO
# decode the cookie
self.assertIn("&viewmode=13", r.data["file"]) # type: ignore
self.assertIn("&viewmode=12", r.data["terminal"]) # type: ignore
self.assertIn("&viewmode=11", r.data["control"]) # type: ignore
self.assertIn("&viewmode=13", r.data["file"])
self.assertIn("&viewmode=12", r.data["terminal"])
self.assertIn("&viewmode=11", r.data["control"])
self.assertIn("&gotonode=", r.data["file"]) # type: ignore
self.assertIn("&gotonode=", r.data["terminal"]) # type: ignore
self.assertIn("&gotonode=", r.data["control"]) # type: ignore
self.assertIn("&gotonode=", r.data["file"])
self.assertIn("&gotonode=", r.data["terminal"])
self.assertIn("&gotonode=", r.data["control"])
self.assertIn("?login=", r.data["file"]) # type: ignore
self.assertIn("?login=", r.data["terminal"]) # type: ignore
self.assertIn("?login=", r.data["control"]) # type: ignore
self.assertIn("?login=", r.data["file"])
self.assertIn("?login=", r.data["terminal"])
self.assertIn("?login=", r.data["control"])
self.assertEqual(self.agent.hostname, r.data["hostname"]) # type: ignore
self.assertEqual(self.agent.client.name, r.data["client"]) # type: ignore
self.assertEqual(self.agent.site.name, r.data["site"]) # type: ignore
self.assertEqual(self.agent.hostname, r.data["hostname"])
self.assertEqual(self.agent.client.name, r.data["client"])
self.assertEqual(self.agent.site.name, r.data["site"])
self.assertEqual(r.status_code, 200)
mock_token.return_value = "err"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.nats_cmd")
@@ -559,7 +551,7 @@ class TestAgentViews(TacticalTestCase):
nats_cmd.return_value = "ok"
r = self.client.post(url)
self.assertEqual(r.status_code, 200)
self.assertIn(self.agent.hostname, r.data) # type: ignore
self.assertIn(self.agent.hostname, r.data)
nats_cmd.assert_called_with(
{"func": "recover", "payload": {"mode": "mesh"}}, timeout=90
)
@@ -577,10 +569,9 @@ class TestAgentViews(TacticalTestCase):
@patch("agents.tasks.run_script_email_results_task.delay")
@patch("agents.models.Agent.run_script")
def test_run_script(self, run_script, email_task):
from agents.models import AgentCustomField, AgentHistory, Note
from clients.models import ClientCustomField, SiteCustomField
from .models import AgentCustomField, AgentHistory, Note
run_script.return_value = "ok"
url = f"/agents/{self.agent.agent_id}/runscript/"
script = baker.make_recipe("scripts.script")
@@ -596,6 +587,9 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
if not hist:
raise AgentHistory.DoesNotExist
run_script.assert_called_with(
scriptpk=script.pk, args=[], timeout=18, wait=True, history_pk=hist.pk
)
@@ -644,6 +638,9 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
if not hist:
raise AgentHistory.DoesNotExist
run_script.assert_called_with(
scriptpk=script.pk, args=["hello", "world"], timeout=25, history_pk=hist.pk
)
@@ -658,13 +655,16 @@ class TestAgentViews(TacticalTestCase):
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"custom_field": custom_field.pk,
"save_all_output": True,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
if not hist:
raise AgentHistory.DoesNotExist
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
@@ -686,13 +686,16 @@ class TestAgentViews(TacticalTestCase):
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"custom_field": custom_field.pk,
"save_all_output": False,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
if not hist:
raise AgentHistory.DoesNotExist
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
@@ -716,13 +719,16 @@ class TestAgentViews(TacticalTestCase):
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"custom_field": custom_field.pk,
"save_all_output": False,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
if not hist:
raise AgentHistory.DoesNotExist
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
@@ -750,6 +756,9 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
hist = AgentHistory.objects.filter(agent=self.agent, script=script).last()
if not hist:
raise AgentHistory.DoesNotExist
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
@@ -771,8 +780,8 @@ class TestAgentViews(TacticalTestCase):
r = self.client.get(url)
serializer = AgentNoteSerializer(notes, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data), 4) # type: ignore
self.assertEqual(r.data, serializer.data) # type: ignore
self.assertEqual(len(r.data), 4)
self.assertEqual(r.data, serializer.data)
# test with agent_id
url = f"{base_url}/{agent.agent_id}/notes/"
@@ -780,8 +789,8 @@ class TestAgentViews(TacticalTestCase):
r = self.client.get(url)
serializer = AgentNoteSerializer(notes, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data), 4) # type: ignore
self.assertEqual(r.data, serializer.data) # type: ignore
self.assertEqual(len(r.data), 4)
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
@@ -792,7 +801,7 @@ class TestAgentViews(TacticalTestCase):
data = {"note": "This is a note", "agent_id": agent.agent_id}
r = self.client.post(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(Note.objects.filter(agent=agent).exists()) # type: ignore
self.assertTrue(Note.objects.filter(agent=agent).exists())
self.check_not_authenticated("post", url)
@@ -800,7 +809,7 @@ class TestAgentViews(TacticalTestCase):
# setup
agent = baker.make_recipe("agents.agent")
note = baker.make("agents.Note", agent=agent)
url = f"{base_url}/notes/{note.id}/"
url = f"{base_url}/notes/{note.pk}/"
# test not found
r = self.client.get(f"{base_url}/notes/500/")
@@ -815,7 +824,7 @@ class TestAgentViews(TacticalTestCase):
# setup
agent = baker.make_recipe("agents.agent")
note = baker.make("agents.Note", agent=agent)
url = f"{base_url}/notes/{note.id}/"
url = f"{base_url}/notes/{note.pk}/"
# test not found
r = self.client.put(f"{base_url}/notes/500/")
@@ -825,7 +834,7 @@ class TestAgentViews(TacticalTestCase):
r = self.client.put(url, data)
self.assertEqual(r.status_code, 200)
new_note = Note.objects.get(pk=note.id) # type: ignore
new_note = Note.objects.get(pk=note.pk)
self.assertEqual(new_note.note, data["note"])
self.check_not_authenticated("put", url)
@@ -834,7 +843,7 @@ class TestAgentViews(TacticalTestCase):
# setup
agent = baker.make_recipe("agents.agent")
note = baker.make("agents.Note", agent=agent)
url = f"{base_url}/notes/{note.id}/"
url = f"{base_url}/notes/{note.pk}/"
# test not found
r = self.client.delete(f"{base_url}/notes/500/")
@@ -843,7 +852,7 @@ class TestAgentViews(TacticalTestCase):
r = self.client.delete(url)
self.assertEqual(r.status_code, 200)
self.assertFalse(Note.objects.filter(pk=note.id).exists()) # type: ignore
self.assertFalse(Note.objects.filter(pk=note.pk).exists())
self.check_not_authenticated("delete", url)
@@ -866,11 +875,6 @@ class TestAgentViews(TacticalTestCase):
self.assertEqual(r.data, data) # type:ignore
@modify_settings(
MIDDLEWARE={
"remove": "tacticalrmm.middleware.LinuxMiddleware",
}
)
class TestAgentViewsNew(TacticalTestCase):
def setUp(self):
self.authenticate()
@@ -883,14 +887,14 @@ class TestAgentViewsNew(TacticalTestCase):
agent = baker.make_recipe("agents.agent")
# Test client toggle maintenance mode
data = {"type": "Client", "id": agent.site.client.id, "action": True} # type: ignore
data = {"type": "Client", "id": agent.site.client.id, "action": True}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(Agent.objects.get(pk=agent.pk).maintenance_mode)
# Test site toggle maintenance mode
data = {"type": "Site", "id": agent.site.id, "action": False} # type: ignore
data = {"type": "Site", "id": agent.site.id, "action": False}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
@@ -905,20 +909,15 @@ class TestAgentViewsNew(TacticalTestCase):
self.check_not_authenticated("post", url)
@modify_settings(
MIDDLEWARE={
"remove": "tacticalrmm.middleware.LinuxMiddleware",
}
)
class TestAgentPermissions(TacticalTestCase):
def setUp(self):
self.client_setup()
self.setup_client()
self.setup_coresettings()
def test_list_agents_permissions(self):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
url = f"{base_url}/"
@@ -936,17 +935,17 @@ class TestAgentPermissions(TacticalTestCase):
# all agents should be returned
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 10) # type: ignore
self.assertEqual(len(response.data), 10)
# limit user to specific client. only 1 agent should be returned
user.role.can_view_clients.set([agents[4].client])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 2) # type: ignore
self.assertEqual(len(response.data), 2)
# limit agent to specific site. 2 should be returned now
user.role.can_view_sites.set([agents[6].site])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 4) # type: ignore
self.assertEqual(len(response.data), 4)
# make sure superusers work
self.check_authorized_superuser("get", url)
@@ -961,7 +960,7 @@ class TestAgentPermissions(TacticalTestCase):
b64_to_hex.return_value = "nodeid"
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
agent = baker.make_recipe("agents.agent")
baker.make_recipe("winupdate.winupdate_policy", agent=agent)
@@ -1044,7 +1043,7 @@ class TestAgentPermissions(TacticalTestCase):
self.check_authorized_superuser(test["method"], url)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized(test["method"], url)
@@ -1071,9 +1070,9 @@ class TestAgentPermissions(TacticalTestCase):
site = baker.make("clients.Site")
client = baker.make("clients.Client")
site_data = {"id": site.id, "type": "Site", "action": True}
site_data = {"id": site.pk, "type": "Site", "action": True}
client_data = {"id": client.id, "type": "Client", "action": True}
client_data = {"id": client.pk, "type": "Client", "action": True}
url = f"{base_url}/maintenance/bulk/"
@@ -1082,7 +1081,7 @@ class TestAgentPermissions(TacticalTestCase):
self.check_authorized_superuser("post", url, client_data)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized("post", url, site_data)
@@ -1123,7 +1122,7 @@ class TestAgentPermissions(TacticalTestCase):
update_task.reset_mock()
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
self.check_not_authorized("post", url, data)
update_task.assert_not_called()
@@ -1162,10 +1161,10 @@ class TestAgentPermissions(TacticalTestCase):
# test superuser access
response = self.check_authorized_superuser("get", url)
self.assertEqual(len(response.data["agents"]), 12) # type: ignore
self.assertEqual(len(response.data["agents"]), 12)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
self.check_not_authorized("get", url)
@@ -1173,22 +1172,22 @@ class TestAgentPermissions(TacticalTestCase):
user.role.save()
response = self.check_authorized("get", url)
self.assertEqual(len(response.data["agents"]), 12) # type: ignore
self.assertEqual(len(response.data["agents"]), 12)
# limit to client
user.role.can_view_clients.set([agents[0].client])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data["agents"]), 5) # type: ignore
self.assertEqual(len(response.data["agents"]), 5)
# add site
user.role.can_view_sites.set([other_agents[0].site])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data["agents"]), 12) # type: ignore
self.assertEqual(len(response.data["agents"]), 12)
# remove client permissions
user.role.can_view_clients.clear()
response = self.check_authorized("get", url)
self.assertEqual(len(response.data["agents"]), 7) # type: ignore
self.assertEqual(len(response.data["agents"]), 7)
def test_generating_agent_installer_permissions(self):
@@ -1202,7 +1201,7 @@ class TestAgentPermissions(TacticalTestCase):
self.check_authorized_superuser("post", url)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
self.check_not_authorized("post", url)
@@ -1215,8 +1214,8 @@ class TestAgentPermissions(TacticalTestCase):
user.role.can_view_clients.set([client])
data = {
"client": client.id,
"site": client_site.id,
"client": client.pk,
"site": client_site.pk,
"version": settings.LATEST_AGENT_VER,
"arch": "64",
}
@@ -1224,8 +1223,8 @@ class TestAgentPermissions(TacticalTestCase):
self.check_authorized("post", url, data)
data = {
"client": site.client.id,
"site": site.id,
"client": site.client.pk,
"site": site.pk,
"version": settings.LATEST_AGENT_VER,
"arch": "64",
}
@@ -1236,8 +1235,8 @@ class TestAgentPermissions(TacticalTestCase):
user.role.can_view_clients.clear()
user.role.can_view_sites.set([site])
data = {
"client": site.client.id,
"site": site.id,
"client": site.client.pk,
"site": site.pk,
"version": settings.LATEST_AGENT_VER,
"arch": "64",
}
@@ -1245,8 +1244,8 @@ class TestAgentPermissions(TacticalTestCase):
self.check_authorized("post", url, data)
data = {
"client": client.id,
"site": client_site.id,
"client": client.pk,
"site": client_site.pk,
"version": settings.LATEST_AGENT_VER,
"arch": "64",
}
@@ -1267,17 +1266,17 @@ class TestAgentPermissions(TacticalTestCase):
{"url": f"{base_url}/notes/", "method": "get", "role": "can_list_notes"},
{"url": f"{base_url}/notes/", "method": "post", "role": "can_manage_notes"},
{
"url": f"{base_url}/notes/{notes[0].id}/",
"url": f"{base_url}/notes/{notes[0].pk}/",
"method": "get",
"role": "can_list_notes",
},
{
"url": f"{base_url}/notes/{notes[0].id}/",
"url": f"{base_url}/notes/{notes[0].pk}/",
"method": "put",
"role": "can_manage_notes",
},
{
"url": f"{base_url}/notes/{notes[0].id}/",
"url": f"{base_url}/notes/{notes[0].pk}/",
"method": "delete",
"role": "can_manage_notes",
},
@@ -1288,7 +1287,7 @@ class TestAgentPermissions(TacticalTestCase):
self.check_authorized_superuser(test["method"], test["url"])
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
self.check_not_authorized(test["method"], test["url"])
setattr(user.role, test["role"], True)
@@ -1299,7 +1298,7 @@ class TestAgentPermissions(TacticalTestCase):
user = self.create_user_with_roles(["can_list_notes", "can_manage_notes"])
user.role.can_view_sites.set([agent.site])
user.role.save()
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
authorized_data = {"note": "Test not here", "agent_id": agent.agent_id}
@@ -1310,7 +1309,7 @@ class TestAgentPermissions(TacticalTestCase):
# should only return the 4 allowed agent notes (one got deleted above in the loop)
r = self.client.get(f"{base_url}/notes/")
self.assertEqual(len(r.data), 4) # type: ignore
self.assertEqual(len(r.data), 4)
# test with agent_id in url
self.check_authorized("get", f"{base_url}/{agent.agent_id}/notes/")
@@ -1321,25 +1320,25 @@ class TestAgentPermissions(TacticalTestCase):
# test post, get, put, and delete; unauthorized is returned for an unauthorized agent and the calls work for an authorized one
self.check_authorized("post", f"{base_url}/notes/", authorized_data)
self.check_not_authorized("post", f"{base_url}/notes/", unauthorized_data)
self.check_authorized("get", f"{base_url}/notes/{notes[2].id}/")
self.check_authorized("get", f"{base_url}/notes/{notes[2].pk}/")
self.check_not_authorized(
"get", f"{base_url}/notes/{unauthorized_notes[2].id}/"
"get", f"{base_url}/notes/{unauthorized_notes[2].pk}/"
)
self.check_authorized(
"put", f"{base_url}/notes/{notes[3].id}/", authorized_data
"put", f"{base_url}/notes/{notes[3].pk}/", authorized_data
)
self.check_not_authorized(
"put", f"{base_url}/notes/{unauthorized_notes[3].id}/", unauthorized_data
"put", f"{base_url}/notes/{unauthorized_notes[3].pk}/", unauthorized_data
)
self.check_authorized("delete", f"{base_url}/notes/{notes[3].id}/")
self.check_authorized("delete", f"{base_url}/notes/{notes[3].pk}/")
self.check_not_authorized(
"delete", f"{base_url}/notes/{unauthorized_notes[3].id}/"
"delete", f"{base_url}/notes/{unauthorized_notes[3].pk}/"
)
def test_get_agent_history_permissions(self):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
sites = baker.make("clients.Site", _quantity=2)
agent = baker.make_recipe("agents.agent", site=sites[0])
@@ -1368,14 +1367,14 @@ class TestAgentPermissions(TacticalTestCase):
r = self.check_authorized("get", url)
self.check_authorized("get", authorized_url)
self.check_authorized("get", unauthorized_url)
self.assertEqual(len(r.data), 11) # type: ignore
self.assertEqual(len(r.data), 11)
# limit user to specific client.
user.role.can_view_clients.set([agent.client])
self.check_authorized("get", authorized_url)
self.check_not_authorized("get", unauthorized_url)
r = self.check_authorized("get", url)
self.assertEqual(len(r.data), 5) # type: ignore
self.assertEqual(len(r.data), 5)
# make sure superusers work
self.check_authorized_superuser("get", url)
@@ -1383,11 +1382,6 @@ class TestAgentPermissions(TacticalTestCase):
self.check_authorized_superuser("get", unauthorized_url)
@modify_settings(
MIDDLEWARE={
"remove": "tacticalrmm.middleware.LinuxMiddleware",
}
)
class TestAgentTasks(TacticalTestCase):
def setUp(self):
self.authenticate()
@@ -1426,8 +1420,8 @@ class TestAgentTasks(TacticalTestCase):
r = agent_update(agent64_nosign.agent_id)
self.assertEqual(r, "created")
action = PendingAction.objects.get(agent__agent_id=agent64_nosign.agent_id)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending")
self.assertEqual(action.action_type, PAAction.AGENT_UPDATE)
self.assertEqual(action.status, PAStatus.PENDING)
self.assertEqual(
action.details["url"],
f"https://github.com/amidaware/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
@@ -1458,13 +1452,13 @@ class TestAgentTasks(TacticalTestCase):
nats_cmd.return_value = "ok"
get_exe.return_value = "https://exe.tacticalrmm.io"
r = agent_update(agent64_sign.pk, codesign.token) # type: ignore
r = agent_update(agent64_sign.pk, codesign.token)
self.assertEqual(r, "created")
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=64&token=testtoken123", # type: ignore
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=64&token=testtoken123",
"version": settings.LATEST_AGENT_VER,
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
},
@@ -1472,8 +1466,8 @@ class TestAgentTasks(TacticalTestCase):
wait=False,
)
action = PendingAction.objects.get(agent__pk=agent64_sign.pk)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending")
self.assertEqual(action.action_type, PAAction.AGENT_UPDATE)
self.assertEqual(action.status, PAStatus.PENDING)
# test __with__ code signing (32 bit)
agent32_sign = baker.make_recipe(
@@ -1484,13 +1478,13 @@ class TestAgentTasks(TacticalTestCase):
nats_cmd.return_value = "ok"
get_exe.return_value = "https://exe.tacticalrmm.io"
r = agent_update(agent32_sign.pk, codesign.token) # type: ignore
r = agent_update(agent32_sign.pk, codesign.token)
self.assertEqual(r, "created")
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=32&token=testtoken123", # type: ignore
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=32&token=testtoken123",
"version": settings.LATEST_AGENT_VER,
"inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
},
@@ -1498,8 +1492,8 @@ class TestAgentTasks(TacticalTestCase):
wait=False,
)
action = PendingAction.objects.get(agent__pk=agent32_sign.pk)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending") """
self.assertEqual(action.action_type, PAAction.AGENT_UPDATE)
self.assertEqual(action.status, PAStatus.PENDING) """
@patch("agents.tasks.agent_update")
@patch("agents.tasks.sleep", return_value=None)
@@ -1530,7 +1524,7 @@ class TestAgentTasks(TacticalTestCase):
self.assertEqual(agent_update.call_count, 33)
def test_agent_history_prune_task(self):
from .tasks import prune_agent_history
from agents.tasks import prune_agent_history
# setup data
agent = baker.make_recipe("agents.agent")
@@ -1541,7 +1535,7 @@ class TestAgentTasks(TacticalTestCase):
)
days = 0
for item in history: # type: ignore
for item in history:
item.time = djangotime.now() - djangotime.timedelta(days=days)
item.save()
days = days + 5
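The prune_agent_history test above spaces the history rows five days apart before pruning them by age. A small stdlib-only sketch of the same age-based pruning idea (prune_older_than and the timestamp list are illustrative, not the task's actual code):

from datetime import datetime, timedelta, timezone

def prune_older_than(records: list, days: int) -> list:
    """Keep only timestamps newer than `days` days, mirroring pruning history rows by age."""
    cutoff = datetime.now(timezone.utc) - timedelta(days=days)
    return [t for t in records if t > cutoff]

now = datetime.now(timezone.utc)
# timestamps spaced 5 days apart, like the test's history items
history = [now - timedelta(days=d) for d in range(0, 30, 5)]
kept = prune_older_than(history, days=14)
print(len(history), "->", len(kept))  # 6 -> 3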

View File

@@ -0,0 +1,62 @@
from typing import TYPE_CHECKING
from unittest.mock import patch
from model_bakery import baker
from tacticalrmm.test import TacticalTestCase
if TYPE_CHECKING:
from clients.models import Client, Site
class TestRecovery(TacticalTestCase):
def setUp(self) -> None:
self.authenticate()
self.setup_coresettings()
self.client1: "Client" = baker.make("clients.Client")
self.site1: "Site" = baker.make("clients.Site", client=self.client1)
@patch("agents.models.Agent.recover")
@patch("agents.views.get_mesh_ws_url")
def test_recover(self, get_mesh_ws_url, recover) -> None:
get_mesh_ws_url.return_value = "https://mesh.example.com"
agent = baker.make_recipe(
"agents.online_agent",
site=self.site1,
monitoring_type="server",
plat="windows",
)
url = f"/agents/{agent.agent_id}/recover/"
# test successful tacticalagent recovery
data = {"mode": "tacagent"}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
recover.assert_called_with("tacagent", "https://mesh.example.com", wait=False)
get_mesh_ws_url.assert_called_once()
# reset mocks
recover.reset_mock()
get_mesh_ws_url.reset_mock()
# test successful mesh agent recovery
data = {"mode": "mesh"}
recover.return_value = ("ok", False)
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
get_mesh_ws_url.assert_not_called()
recover.assert_called_with("mesh", "")
# reset mocks
recover.reset_mock()
get_mesh_ws_url.reset_mock()
# test failed mesh agent recovery
data = {"mode": "mesh"}
recover.return_value = ("Unable to contact the agent", True)
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("post", url)

View File

@@ -1,6 +1,7 @@
from django.urls import path
from autotasks.views import GetAddAutoTasks
from checks.views import GetAddChecks
from django.urls import path
from logs.views import PendingActions
from . import views

View File

@@ -2,11 +2,11 @@ import asyncio
import tempfile
import urllib.parse
from core.models import CodeSignToken, CoreSettings
from core.utils import get_mesh_device_id, get_mesh_ws_url
from django.conf import settings
from django.http import FileResponse
from core.models import CodeSignToken
from core.utils import get_core_settings, get_mesh_device_id, get_mesh_ws_url
from tacticalrmm.constants import MeshAgentIdent
@@ -19,18 +19,18 @@ def get_agent_url(arch: str, plat: str) -> str:
endpoint = "linuxagents"
dl_url = ""
try:
t: CodeSignToken = CodeSignToken.objects.first() # type: ignore
if t.is_valid:
base_url = settings.EXE_GEN_URL + f"/api/v1/{endpoint}/?"
params = {
"version": settings.LATEST_AGENT_VER,
"arch": arch,
"token": t.token,
}
dl_url = base_url + urllib.parse.urlencode(params)
except:
pass
token = CodeSignToken.objects.first()
if not token:
return dl_url
if token.is_valid:
base_url = settings.EXE_GEN_URL + f"/api/v1/{endpoint}/?"
params = {
"version": settings.LATEST_AGENT_VER,
"arch": arch,
"token": token.token,
}
dl_url = base_url + urllib.parse.urlencode(params)
return dl_url
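The rewritten get_agent_url above drops the bare try/except in favour of an explicit None check before assembling the signed download URL with urllib.parse.urlencode. A hedged, standalone sketch of that URL-building step (build_signed_url and its parameters are illustrative names, not the project's function):

from typing import Optional
from urllib.parse import urlencode

def build_signed_url(base_url: str, version: str, arch: str, token: Optional[str]) -> str:
    """Return a download URL with the token appended, or an empty string when no token exists."""
    if not token:
        return ""
    params = {"version": version, "arch": arch, "token": token}
    return f"{base_url}/?{urlencode(params)}"

print(build_signed_url("https://exe.example.com/api/v1/winagents", "2.0.0", "64", "testtoken123"))
print(repr(build_signed_url("https://exe.example.com/api/v1/winagents", "2.0.0", "64", None)))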
@@ -54,12 +54,16 @@ def generate_linux_install(
arch_id = MeshAgentIdent.LINUX_ARM_64
case "arm":
arch_id = MeshAgentIdent.LINUX_ARM_HF
case _:
arch_id = "not_found"
core: CoreSettings = CoreSettings.objects.first() # type: ignore
core = get_core_settings()
uri = get_mesh_ws_url()
mesh_id = asyncio.run(get_mesh_device_id(uri, core.mesh_device_group))
mesh_dl = f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=0&meshinstall={arch_id}" # type: ignore
mesh_dl = (
f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=0&meshinstall={arch_id}"
)
sh = settings.LINUX_AGENT_SCRIPT
with open(sh, "r") as f:

View File

@@ -5,32 +5,35 @@ import random
import string
import time
from core.models import CodeSignToken, CoreSettings
from core.utils import get_mesh_ws_url, remove_mesh_agent, send_command_with_mesh
from django.conf import settings
from django.db.models import Q
from django.db.models import Count, Exists, OuterRef, Prefetch, Q
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from logs.models import AuditLog, DebugLog, PendingAction
from meshctrl.utils import get_login_token
from packaging import version as pyver
from rest_framework.decorators import api_view, permission_classes
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from core.models import CodeSignToken
from core.utils import get_core_settings, get_mesh_ws_url, remove_mesh_agent
from logs.models import AuditLog, DebugLog, PendingAction
from scripts.models import Script
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from winupdate.serializers import WinUpdatePolicySerializer
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from tacticalrmm.constants import AGENT_DEFER
from tacticalrmm.constants import AGENT_DEFER, EvtLogNames, PAAction, PAStatus
from tacticalrmm.helpers import notify_error
from tacticalrmm.permissions import (
_has_perm_on_agent,
_has_perm_on_client,
_has_perm_on_site,
)
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
from tacticalrmm.utils import get_default_timezone, reload_nats
from winupdate.models import WinUpdate
from winupdate.serializers import WinUpdatePolicySerializer
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from .models import Agent, AgentCustomField, AgentHistory, Note
from .permissions import (
@@ -64,12 +67,22 @@ class GetAgents(APIView):
permission_classes = [IsAuthenticated, AgentPerms]
def get(self, request):
from checks.models import Check, CheckResult
monitoring_type_filter = Q()
client_site_filter = Q()
monitoring_type = request.query_params.get("monitoring_type", None)
if monitoring_type:
if monitoring_type in ["server", "workstation"]:
monitoring_type_filter = Q(monitoring_type=monitoring_type)
else:
return notify_error("monitoring type does not exist")
if "site" in request.query_params.keys():
filter = Q(site_id=request.query_params["site"])
client_site_filter = Q(site_id=request.query_params["site"])
elif "client" in request.query_params.keys():
filter = Q(site__client_id=request.query_params["client"])
else:
filter = Q()
client_site_filter = Q(site__client_id=request.query_params["client"])
# by default detail=true
if (
@@ -77,24 +90,53 @@ class GetAgents(APIView):
or "detail" in request.query_params.keys()
and request.query_params["detail"] == "true"
):
agents = (
Agent.objects.filter_by_role(request.user) # type: ignore
.select_related("site", "policy", "alert_template")
.prefetch_related("agentchecks")
.filter(filter)
.filter(monitoring_type_filter)
.filter(client_site_filter)
.defer(*AGENT_DEFER)
.select_related(
"site__server_policy",
"site__workstation_policy",
"site__client__server_policy",
"site__client__workstation_policy",
"policy",
"alert_template",
)
.prefetch_related(
Prefetch(
"agentchecks",
queryset=Check.objects.select_related("script"),
),
Prefetch(
"checkresults",
queryset=CheckResult.objects.select_related("assigned_check"),
),
)
.annotate(
pending_actions_count=Count(
"pendingactions",
filter=Q(pendingactions__status=PAStatus.PENDING),
)
)
.annotate(
has_patches_pending=Exists(
WinUpdate.objects.filter(
agent_id=OuterRef("pk"), action="approve", installed=False
)
)
)
)
ctx = {"default_tz": get_default_timezone()}
serializer = AgentTableSerializer(agents, many=True, context=ctx)
serializer = AgentTableSerializer(agents, many=True)
# if detail=false
else:
agents = (
Agent.objects.filter_by_role(request.user) # type: ignore
.select_related("site")
.filter(filter)
.only("agent_id", "hostname", "site")
.defer(*AGENT_DEFER)
.select_related("site__client")
.filter(monitoring_type_filter)
.filter(client_site_filter)
)
serializer = AgentHostnameSerializer(agents, many=True)
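The GetAgents rewrite above pushes the pending-action count and the "patches pending" flag into the queryset itself via annotate(Count(...)) and Exists(...), so the serializer no longer issues one query per agent. As a rough stdlib analogy for what the database does in that single query, here is a one-pass aggregation over plain tuples (the data shapes are invented purely for illustration):

from collections import Counter

# (agent_id, status) pairs standing in for pendingactions rows
pending_actions = [(1, "pending"), (1, "completed"), (2, "pending"), (2, "pending")]
# agent ids that have at least one approved-but-not-installed update
agents_with_patches = {2, 3}

pending_counts = Counter(a for a, status in pending_actions if status == "pending")

agents = [1, 2, 3]
rows = [
    {
        "agent_id": a,
        "pending_actions_count": pending_counts.get(a, 0),  # ~ annotate(Count(...))
        "has_patches_pending": a in agents_with_patches,     # ~ Exists(...)
    }
    for a in agents
]
print(rows)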
@@ -130,13 +172,13 @@ class GetUpdateDeleteAgent(APIView):
for field in request.data["custom_fields"]:
custom_field = field
custom_field["agent"] = agent.id # type: ignore
custom_field["agent"] = agent.pk
if AgentCustomField.objects.filter(
field=field["field"], agent=agent.id # type: ignore
field=field["field"], agent=agent.pk
):
value = AgentCustomField.objects.get(
field=field["field"], agent=agent.id # type: ignore
field=field["field"], agent=agent.pk
)
serializer = AgentCustomFieldSerializer(
instance=value, data=custom_field
@@ -206,19 +248,19 @@ class AgentMeshCentral(APIView):
# get mesh urls
def get(self, request, agent_id):
agent = get_object_or_404(Agent, agent_id=agent_id)
core = CoreSettings.objects.first()
core = get_core_settings()
token = agent.get_login_token(
key=core.mesh_token,
user=f"user//{core.mesh_username.lower()}", # type:ignore
)
if not core.mesh_disable_auto_login:
token = get_login_token(
key=core.mesh_token, user=f"user//{core.mesh_username}"
)
token_param = f"login={token}&"
else:
token_param = ""
if token == "err":
return notify_error("Invalid mesh token")
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31" # type:ignore
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31" # type:ignore
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31" # type:ignore
control = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
terminal = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
file = f"{core.mesh_site}/?{token_param}gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
AuditLog.audit_mesh_session(
username=request.user.username,
@@ -252,9 +294,9 @@ class AgentMeshCentral(APIView):
@permission_classes([IsAuthenticated, AgentPerms])
def get_agent_versions(request):
agents = (
Agent.objects.filter_by_role(request.user)
.prefetch_related("site")
.only("pk", "hostname")
Agent.objects.defer(*AGENT_DEFER)
.filter_by_role(request.user) # type: ignore
.select_related("site__client")
)
return Response(
{
@@ -268,7 +310,7 @@ def get_agent_versions(request):
@permission_classes([IsAuthenticated, UpdateAgentPerms])
def update_agents(request):
q = (
Agent.objects.filter_by_role(request.user)
Agent.objects.filter_by_role(request.user) # type: ignore
.filter(agent_id__in=request.data["agent_ids"])
.only("agent_id", "version")
)
@@ -294,9 +336,9 @@ def ping(request, agent_id):
break
else:
attempts += 1
time.sleep(1)
time.sleep(0.5)
if attempts >= 5:
if attempts >= 3:
break
return Response({"name": agent.hostname, "status": status})
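The ping endpoint above now retries fewer times with a shorter sleep (three attempts at 0.5 s instead of five at 1 s). A generic polling helper in the same spirit, stdlib only (poll is an illustrative helper, not the view's code):

import time
from typing import Callable

def poll(check: Callable[[], str], attempts: int = 3, delay: float = 0.5) -> str:
    """Call `check` until it returns something other than 'offline' or the attempts run out."""
    status = "offline"
    for _ in range(attempts):
        status = check()
        if status != "offline":
            break
        time.sleep(delay)
    return status

replies = iter(["offline", "online"])
print(poll(lambda: next(replies, "offline")))  # "online" on the second attempt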
@@ -311,7 +353,7 @@ def get_event_log(request, agent_id, logtype, days):
return demo_get_eventlog()
agent = get_object_or_404(Agent, agent_id=agent_id)
timeout = 180 if logtype == "Security" else 30
timeout = 180 if logtype == EvtLogNames.SECURITY else 30
data = {
"func": "eventlog",
@@ -385,9 +427,11 @@ class Reboot(APIView):
# reboot later
def patch(self, request, agent_id):
agent = get_object_or_404(Agent, agent_id=agent_id)
if agent.is_posix:
return notify_error(f"Not currently implemented for {agent.plat}")
try:
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%dT%H:%M:%S")
except Exception:
return notify_error("Invalid date")
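The scheduled-reboot handler above now expects an ISO-style timestamp ("%Y-%m-%dT%H:%M:%S") instead of the old space-separated format, rejecting anything else as "Invalid date". A minimal parse-or-reject sketch of that validation (parse_reboot_time is an illustrative name):

import datetime as dt
from typing import Optional

def parse_reboot_time(value: str) -> Optional[dt.datetime]:
    """Return a datetime for an ISO-style 'YYYY-MM-DDTHH:MM:SS' string, or None if it does not parse."""
    try:
        return dt.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S")
    except (ValueError, TypeError):
        return None

print(parse_reboot_time("2025-08-29T18:41:02"))  # accepted
print(parse_reboot_time("2025-08-29 18:41"))     # old format -> None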
@@ -426,7 +470,7 @@ class Reboot(APIView):
details = {"taskname": task_name, "time": str(obj)}
PendingAction.objects.create(
agent=agent, action_type="schedreboot", details=details
agent=agent, action_type=PAAction.SCHED_REBOOT, details=details
)
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
return Response(
@@ -437,9 +481,10 @@ class Reboot(APIView):
@api_view(["POST"])
@permission_classes([IsAuthenticated, InstallAgentPerms])
def install_agent(request):
from knox.models import AuthToken
from accounts.models import User
from agents.utils import get_agent_url
from knox.models import AuthToken
client_id = request.data["client"]
site_id = request.data["site"]
@@ -486,14 +531,11 @@ def install_agent(request):
# linux agents are in beta for now, only available for sponsors for testing
# remove this after it's out of beta
try:
t: CodeSignToken = CodeSignToken.objects.first() # type: ignore
except:
return notify_error("Something went wrong")
if t is None:
code_token = CodeSignToken.objects.first()
if not code_token:
return notify_error("Missing code signing token")
if not t.is_valid:
if not code_token.is_valid:
return notify_error("Code signing token is not valid")
from agents.utils import generate_linux_install
@@ -597,28 +639,23 @@ def install_agent(request):
@api_view(["POST"])
@permission_classes([IsAuthenticated, RecoverAgentPerms])
def recover(request, agent_id):
agent = get_object_or_404(Agent, agent_id=agent_id)
def recover(request, agent_id: str) -> Response:
agent: Agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=agent_id
)
mode = request.data["mode"]
if mode == "tacagent":
if agent.is_posix:
cmd = "systemctl restart tacticalagent.service"
shell = 3
else:
cmd = "net stop tacticalrmm & taskkill /F /IM tacticalrmm.exe & net start tacticalrmm"
shell = 1
uri = get_mesh_ws_url()
asyncio.run(send_command_with_mesh(cmd, uri, agent.mesh_node_id, shell, 0))
agent.recover(mode, uri, wait=False)
return Response("Recovery will be attempted shortly")
elif mode == "mesh":
data = {"func": "recover", "payload": {"mode": mode}}
r = asyncio.run(agent.nats_cmd(data, timeout=20))
if r == "ok":
return Response("Successfully completed recovery")
r, err = agent.recover(mode, "")
if err:
return notify_error(f"Unable to complete recovery: {r}")
return notify_error("Something went wrong")
return Response("Successfully completed recovery")
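The recover view above now hands both modes to agent.recover(); in the mesh branch that call returns a (result, error) pair which decides between a 200 and a notify_error. A simplified standalone mirror of that dispatch, with placeholder strings standing in for the mesh and NATS transports (nothing here is the real Agent method):

from typing import Tuple

def recover(mode: str, mesh_uri: str) -> Tuple[str, bool]:
    """Return (result, error). 'tacagent' fires a restart over mesh without waiting; 'mesh' waits on the agent."""
    if mode == "tacagent":
        # fire-and-forget restart command sent over the mesh connection (simulated)
        return "Recovery will be attempted shortly", False
    if mode == "mesh":
        reply = "ok"  # stand-in for the agent's NATS reply
        if reply == "ok":
            return "Successfully completed recovery", False
        return f"Unable to complete recovery: {reply}", True
    return "Invalid mode", True

result, err = recover("mesh", "")
print(result, err)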
@api_view(["POST"])
@@ -723,7 +760,7 @@ class GetAddNotes(APIView):
agent = get_object_or_404(Agent, agent_id=agent_id)
notes = Note.objects.filter(agent=agent)
else:
notes = Note.objects.filter_by_role(request.user)
notes = Note.objects.filter_by_role(request.user) # type: ignore
return Response(AgentNoteSerializer(notes, many=True).data)
@@ -788,24 +825,24 @@ def bulk(request):
if request.data["target"] == "client":
if not _has_perm_on_client(request.user, request.data["client"]):
raise PermissionDenied()
q = Agent.objects.filter_by_role(request.user).filter(
q = Agent.objects.filter_by_role(request.user).filter( # type: ignore
site__client_id=request.data["client"]
)
elif request.data["target"] == "site":
if not _has_perm_on_site(request.user, request.data["site"]):
raise PermissionDenied()
q = Agent.objects.filter_by_role(request.user).filter(
q = Agent.objects.filter_by_role(request.user).filter( # type: ignore
site_id=request.data["site"]
)
elif request.data["target"] == "agents":
q = Agent.objects.filter_by_role(request.user).filter(
q = Agent.objects.filter_by_role(request.user).filter( # type: ignore
agent_id__in=request.data["agents"]
)
elif request.data["target"] == "all":
q = Agent.objects.filter_by_role(request.user).only("pk", "monitoring_type")
q = Agent.objects.filter_by_role(request.user).only("pk", "monitoring_type") # type: ignore
else:
return notify_error("Something went wrong")
@@ -882,7 +919,7 @@ def agent_maintenance(request):
raise PermissionDenied()
count = (
Agent.objects.filter_by_role(request.user)
Agent.objects.filter_by_role(request.user) # type: ignore
.filter(site__client_id=request.data["id"])
.update(maintenance_mode=request.data["action"])
)
@@ -892,7 +929,7 @@ def agent_maintenance(request):
raise PermissionDenied()
count = (
Agent.objects.filter_by_role(request.user)
Agent.objects.filter_by_role(request.user) # type: ignore
.filter(site_id=request.data["id"])
.update(maintenance_mode=request.data["action"])
)
@@ -928,6 +965,6 @@ class AgentHistoryView(APIView):
agent = get_object_or_404(Agent, agent_id=agent_id)
history = AgentHistory.objects.filter(agent=agent)
else:
history = AgentHistory.objects.filter_by_role(request.user)
history = AgentHistory.objects.filter_by_role(request.user) # type: ignore
ctx = {"default_tz": get_default_timezone()}
return Response(AgentHistorySerializer(history, many=True, context=ctx).data)

View File

@@ -0,0 +1,24 @@
# Generated by Django 4.0.3 on 2022-04-07 17:28
import django.db.models.deletion
from django.db import migrations, models
def delete_alerts_without_agent(apps, schema):
Alert = apps.get_model("alerts", "Alert")
Alert.objects.filter(agent=None).delete()
class Migration(migrations.Migration):
dependencies = [
("agents", "0047_alter_agent_plat_alter_agent_site"),
("alerts", "0010_auto_20210917_1954"),
]
operations = [
migrations.RunPython(
delete_alerts_without_agent, reverse_code=migrations.RunPython.noop
),
]

View File

@@ -1,20 +1,22 @@
from __future__ import annotations
import re
from typing import TYPE_CHECKING, Union
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.db.models.fields import BooleanField, PositiveIntegerField
from django.utils import timezone as djangotime
from logs.models import BaseAuditModel, DebugLog
from logs.models import BaseAuditModel, DebugLog
from tacticalrmm.constants import CheckType, DebugLogType
from tacticalrmm.models import PermissionQuerySet
if TYPE_CHECKING:
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
from autotasks.models import AutomatedTask, TaskResult
from checks.models import Check, CheckResult
from clients.models import Client, Site
SEVERITY_CHOICES = [
@@ -83,62 +85,198 @@ class Alert(models.Model):
max_length=100, null=True, blank=True
)
def __str__(self):
return self.message
def __str__(self) -> str:
return f"{self.alert_type} - {self.message}"
def resolve(self):
@property
def assigned_agent(self) -> "Optional[Agent]":
return self.agent
@property
def site(self) -> "Site":
return self.agent.site
@property
def client(self) -> "Client":
return self.agent.client
def resolve(self) -> None:
self.resolved = True
self.resolved_on = djangotime.now()
self.snoozed = False
self.snooze_until = None
self.save()
self.save(update_fields=["resolved", "resolved_on", "snoozed", "snooze_until"])
@classmethod
def create_or_return_availability_alert(cls, agent):
if not cls.objects.filter(agent=agent, resolved=False).exists():
return cls.objects.create(
agent=agent,
alert_type="availability",
severity="error",
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
hidden=True,
def create_or_return_availability_alert(
cls, agent: Agent, skip_create: bool = False
) -> Optional[Alert]:
if not cls.objects.filter(
agent=agent, alert_type="availability", resolved=False
).exists():
if skip_create:
return None
return cast(
Alert,
cls.objects.create(
agent=agent,
alert_type="availability",
severity="error",
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
hidden=True,
),
)
else:
return cls.objects.get(agent=agent, resolved=False)
try:
return cast(
Alert,
cls.objects.get(
agent=agent, alert_type="availability", resolved=False
),
)
except cls.MultipleObjectsReturned:
alerts = cls.objects.filter(
agent=agent, alert_type="availability", resolved=False
)
last_alert = cast(Alert, alerts.last())
# cycle through other alerts and resolve
for alert in alerts:
if alert.id != last_alert.pk:
alert.resolve()
return last_alert
except cls.DoesNotExist:
return None
@classmethod
def create_or_return_check_alert(cls, check):
def create_or_return_check_alert(
cls,
check: "Check",
agent: "Agent",
alert_severity: Optional[str] = None,
skip_create: bool = False,
) -> "Optional[Alert]":
if not cls.objects.filter(assigned_check=check, resolved=False).exists():
return cls.objects.create(
assigned_check=check,
alert_type="check",
severity=check.alert_severity,
message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
hidden=True,
# need to pass agent if the check is a policy
if not cls.objects.filter(
assigned_check=check,
agent=agent,
resolved=False,
).exists():
if skip_create:
return None
return cast(
Alert,
cls.objects.create(
assigned_check=check,
agent=agent,
alert_type="check",
severity=check.alert_severity
if check.check_type
not in [
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]
else alert_severity,
message=f"{agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
hidden=True,
),
)
else:
return cls.objects.get(assigned_check=check, resolved=False)
try:
return cast(
Alert,
cls.objects.get(
assigned_check=check,
agent=agent,
resolved=False,
),
)
except cls.MultipleObjectsReturned:
alerts = cls.objects.filter(
assigned_check=check,
agent=agent,
resolved=False,
)
last_alert = cast(Alert, alerts.last())
# cycle through other alerts and resolve
for alert in alerts:
if alert.id != last_alert.pk:
alert.resolve()
return last_alert
except cls.DoesNotExist:
return None
@classmethod
def create_or_return_task_alert(cls, task):
def create_or_return_task_alert(
cls,
task: "AutomatedTask",
agent: "Agent",
skip_create: bool = False,
) -> "Optional[Alert]":
if not cls.objects.filter(assigned_task=task, resolved=False).exists():
return cls.objects.create(
assigned_task=task,
alert_type="task",
severity=task.alert_severity,
message=f"{task.agent.hostname} has task: {task.name} that failed.",
hidden=True,
if not cls.objects.filter(
assigned_task=task,
agent=agent,
resolved=False,
).exists():
if skip_create:
return None
return cast(
Alert,
cls.objects.create(
assigned_task=task,
agent=agent,
alert_type="task",
severity=task.alert_severity,
message=f"{agent.hostname} has task: {task.name} that failed.",
hidden=True,
),
)
else:
return cls.objects.get(assigned_task=task, resolved=False)
try:
return cast(
Alert,
cls.objects.get(
assigned_task=task,
agent=agent,
resolved=False,
),
)
except cls.MultipleObjectsReturned:
alerts = cls.objects.filter(
assigned_task=task,
agent=agent,
resolved=False,
)
last_alert = cast(Alert, alerts.last())
# cycle through other alerts and resolve
for alert in alerts:
if alert.id != last_alert.pk:
alert.resolve()
return last_alert
except cls.DoesNotExist:
return None
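All three create_or_return_* methods above now guard against duplicate open alerts: on MultipleObjectsReturned they keep the most recent alert and resolve the rest. A standalone sketch of that keep-latest, resolve-the-others step, using a plain dataclass in place of the Alert model:

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class OpenAlert:
    pk: int
    resolved: bool = False

def dedupe_open_alerts(alerts: List[OpenAlert]) -> Optional[OpenAlert]:
    """Keep the last (newest) open alert and mark all earlier duplicates resolved."""
    unresolved = [a for a in alerts if not a.resolved]
    if not unresolved:
        return None
    last = unresolved[-1]
    for alert in unresolved:
        if alert.pk != last.pk:
            alert.resolved = True
    return last

alerts = [OpenAlert(1), OpenAlert(2), OpenAlert(3)]
print(dedupe_open_alerts(alerts).pk)  # 3
print([a.resolved for a in alerts])   # [True, True, False]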
@classmethod
def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
def handle_alert_failure(
cls, instance: Union[Agent, TaskResult, CheckResult]
) -> None:
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
from autotasks.models import TaskResult
from checks.models import CheckResult
# set variables
dashboard_severities = None
@@ -150,6 +288,7 @@ class Alert(models.Model):
alert_interval = None
email_task = None
text_task = None
run_script_action = None
# check what the instance passed is
if isinstance(instance, Agent):
@@ -165,28 +304,19 @@ class Alert(models.Model):
maintenance_mode = instance.maintenance_mode
alert_severity = "error"
agent = instance
dashboard_severities = ["error"]
email_severities = ["error"]
text_severities = ["error"]
# set alert_template settings
if alert_template:
dashboard_severities = ["error"]
email_severities = ["error"]
text_severities = ["error"]
always_dashboard = alert_template.agent_always_alert
always_email = alert_template.agent_always_email
always_text = alert_template.agent_always_text
alert_interval = alert_template.agent_periodic_alert_days
run_script_action = alert_template.agent_script_actions
if instance.should_create_alert(alert_template):
alert = cls.create_or_return_availability_alert(instance)
else:
# check if there is an alert that exists
if cls.objects.filter(agent=instance, resolved=False).exists():
alert = cls.objects.get(agent=instance, resolved=False)
else:
alert = None
elif isinstance(instance, Check):
elif isinstance(instance, CheckResult):
from checks.tasks import (
handle_check_email_alert_task,
handle_check_sms_alert_task,
@@ -195,75 +325,94 @@ class Alert(models.Model):
email_task = handle_check_email_alert_task
text_task = handle_check_sms_alert_task
email_alert = instance.email_alert
text_alert = instance.text_alert
dashboard_alert = instance.dashboard_alert
email_alert = instance.assigned_check.email_alert
text_alert = instance.assigned_check.text_alert
dashboard_alert = instance.assigned_check.dashboard_alert
alert_template = instance.agent.alert_template
maintenance_mode = instance.agent.maintenance_mode
alert_severity = instance.alert_severity
alert_severity = (
instance.assigned_check.alert_severity
if instance.assigned_check.check_type
not in [
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]
else instance.alert_severity
)
agent = instance.agent
# set alert_template settings
if alert_template:
dashboard_severities = alert_template.check_dashboard_alert_severity
email_severities = alert_template.check_email_alert_severity
text_severities = alert_template.check_text_alert_severity
dashboard_severities = (
alert_template.check_dashboard_alert_severity
if alert_template.check_dashboard_alert_severity
else ["error", "warning", "info"]
)
email_severities = (
alert_template.check_email_alert_severity
if alert_template.check_email_alert_severity
else ["error", "warning"]
)
text_severities = (
alert_template.check_text_alert_severity
if alert_template.check_text_alert_severity
else ["error", "warning"]
)
always_dashboard = alert_template.check_always_alert
always_email = alert_template.check_always_email
always_text = alert_template.check_always_text
alert_interval = alert_template.check_periodic_alert_days
run_script_action = alert_template.check_script_actions
if instance.should_create_alert(alert_template):
alert = cls.create_or_return_check_alert(instance)
else:
# check if there is an alert that exists
if cls.objects.filter(assigned_check=instance, resolved=False).exists():
alert = cls.objects.get(assigned_check=instance, resolved=False)
else:
alert = None
elif isinstance(instance, AutomatedTask):
elif isinstance(instance, TaskResult):
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
email_task = handle_task_email_alert
text_task = handle_task_sms_alert
email_alert = instance.email_alert
text_alert = instance.text_alert
dashboard_alert = instance.dashboard_alert
email_alert = instance.task.email_alert
text_alert = instance.task.text_alert
dashboard_alert = instance.task.dashboard_alert
alert_template = instance.agent.alert_template
maintenance_mode = instance.agent.maintenance_mode
alert_severity = instance.alert_severity
alert_severity = instance.task.alert_severity
agent = instance.agent
# set alert_template settings
if alert_template:
dashboard_severities = alert_template.task_dashboard_alert_severity
email_severities = alert_template.task_email_alert_severity
text_severities = alert_template.task_text_alert_severity
dashboard_severities = (
alert_template.task_dashboard_alert_severity
if alert_template.task_dashboard_alert_severity
else ["error", "warning"]
)
email_severities = (
alert_template.task_email_alert_severity
if alert_template.task_email_alert_severity
else ["error", "warning"]
)
text_severities = (
alert_template.task_text_alert_severity
if alert_template.task_text_alert_severity
else ["error", "warning"]
)
always_dashboard = alert_template.task_always_alert
always_email = alert_template.task_always_email
always_text = alert_template.task_always_text
alert_interval = alert_template.task_periodic_alert_days
run_script_action = alert_template.task_script_actions
if instance.should_create_alert(alert_template):
alert = cls.create_or_return_task_alert(instance)
else:
# check if there is an alert that exists
if cls.objects.filter(assigned_task=instance, resolved=False).exists():
alert = cls.objects.get(assigned_task=instance, resolved=False)
else:
alert = None
else:
return
alert = instance.get_or_create_alert_if_needed(alert_template)
# return if agent is in maintenance mode
if maintenance_mode or not alert:
if not alert or maintenance_mode:
return
# check if alert severity changed on check and update the alert
# check if alert severity changed and update the alert
if alert_severity != alert.severity:
alert.severity = alert_severity
alert.save(update_fields=["severity"])
@@ -272,19 +421,25 @@ class Alert(models.Model):
if dashboard_alert or always_dashboard:
# check if alert template is set and specific severities are configured
if alert_template and alert.severity not in dashboard_severities: # type: ignore
pass
else:
if (
not alert_template
or alert_template
and dashboard_severities
and alert.severity in dashboard_severities
):
alert.hidden = False
alert.save()
alert.save(update_fields=["hidden"])
# send email if enabled
if email_alert or always_email:
# check if alert template is set and specific severities are configured
if alert_template and alert.severity not in email_severities: # type: ignore
pass
else:
if (
not alert_template
or alert_template
and email_severities
and alert.severity in email_severities
):
email_task.delay(
pk=alert.pk,
alert_interval=alert_interval,
@@ -294,13 +449,21 @@ class Alert(models.Model):
if text_alert or always_text:
# check if alert template is set and specific severities are configured
if alert_template and alert.severity not in text_severities: # type: ignore
pass
else:
if (
not alert_template
or alert_template
and text_severities
and alert.severity in text_severities
):
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
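The dashboard, email, and text branches above replace the old "skip when a template lists severities and this one isn't in them" shape with an explicit positive condition. The same predicate pulled out into a pure function for clarity, where None stands for "no alert template" (should_notify is an illustrative name, not project code):

from typing import Optional, Sequence

def should_notify(severity: str, template_severities: Optional[Sequence[str]]) -> bool:
    """No alert template means always notify; otherwise the severity must appear in the configured list."""
    if template_severities is None:
        return True
    return bool(template_severities) and severity in template_severities

print(should_notify("warning", None))                  # True: no alert template
print(should_notify("warning", ["error", "warning"]))  # True: severity is listed
print(should_notify("info", ["error", "warning"]))     # False: severity not listed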
# check if any scripts should be run
if alert_template and alert_template.action and run_script_action and not alert.action_run: # type: ignore
if (
alert_template
and alert_template.action
and run_script_action
and not alert.action_run
):
r = agent.run_script(
scriptpk=alert_template.action.pk,
args=alert.parse_script_args(alert_template.action_args),
@@ -311,7 +474,7 @@ class Alert(models.Model):
)
# command was successful
if type(r) == dict:
if isinstance(r, dict):
alert.action_retcode = r["retcode"]
alert.action_stdout = r["stdout"]
alert.action_stderr = r["stderr"]
@@ -321,21 +484,24 @@ class Alert(models.Model):
else:
DebugLog.error(
agent=agent,
log_type="scripting",
log_type=DebugLogType.SCRIPTING,
message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
)
@classmethod
def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
def handle_alert_resolve(
cls, instance: Union[Agent, TaskResult, CheckResult]
) -> None:
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
from autotasks.models import TaskResult
from checks.models import CheckResult
# set variables
email_on_resolved = False
text_on_resolved = False
resolved_email_task = None
resolved_text_task = None
run_script_action = None
# check what type of instance was passed
if isinstance(instance, Agent):
@@ -345,7 +511,6 @@ class Alert(models.Model):
resolved_text_task = agent_recovery_sms_task
alert_template = instance.alert_template
alert = cls.objects.get(agent=instance, resolved=False)
maintenance_mode = instance.maintenance_mode
agent = instance
@@ -354,7 +519,12 @@ class Alert(models.Model):
text_on_resolved = alert_template.agent_text_on_resolved
run_script_action = alert_template.agent_script_actions
elif isinstance(instance, Check):
if agent.overdue_email_alert:
email_on_resolved = True
if agent.overdue_text_alert:
text_on_resolved = True
elif isinstance(instance, CheckResult):
from checks.tasks import (
handle_resolved_check_email_alert_task,
handle_resolved_check_sms_alert_task,
@@ -364,7 +534,6 @@ class Alert(models.Model):
resolved_text_task = handle_resolved_check_sms_alert_task
alert_template = instance.agent.alert_template
alert = cls.objects.get(assigned_check=instance, resolved=False)
maintenance_mode = instance.agent.maintenance_mode
agent = instance.agent
@@ -373,7 +542,7 @@ class Alert(models.Model):
text_on_resolved = alert_template.check_text_on_resolved
run_script_action = alert_template.check_script_actions
elif isinstance(instance, AutomatedTask):
elif isinstance(instance, TaskResult):
from autotasks.tasks import (
handle_resolved_task_email_alert,
handle_resolved_task_sms_alert,
@@ -383,7 +552,6 @@ class Alert(models.Model):
resolved_text_task = handle_resolved_task_sms_alert
alert_template = instance.agent.alert_template
alert = cls.objects.get(assigned_task=instance, resolved=False)
maintenance_mode = instance.agent.maintenance_mode
agent = instance.agent
@@ -395,8 +563,10 @@ class Alert(models.Model):
else:
return
alert = instance.get_or_create_alert_if_needed(alert_template)
# return if agent is in maintenance mode
if maintenance_mode:
if not alert or maintenance_mode:
return
alert.resolve()
@@ -413,7 +583,7 @@ class Alert(models.Model):
if (
alert_template
and alert_template.resolved_action
and run_script_action # type: ignore
and run_script_action
and not alert.resolved_action_run
):
r = agent.run_script(
@@ -426,7 +596,7 @@ class Alert(models.Model):
)
# command was successful
if type(r) == dict:
if isinstance(r, dict):
alert.resolved_action_retcode = r["retcode"]
alert.resolved_action_stdout = r["stdout"]
alert.resolved_action_stderr = r["stderr"]
@@ -438,11 +608,11 @@ class Alert(models.Model):
else:
DebugLog.error(
agent=agent,
log_type="scripting",
log_type=DebugLogType.SCRIPTING,
message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
)
def parse_script_args(self, args: list[str]):
def parse_script_args(self, args: List[str]) -> List[str]:
if not args:
return []
@@ -463,9 +633,9 @@ class Alert(models.Model):
continue
try:
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
except Exception as e:
DebugLog.error(log_type="scripting", message=str(e))
DebugLog.error(log_type=DebugLogType.SCRIPTING, message=str(e))
continue
else:
@@ -595,10 +765,10 @@ class AlertTemplate(BaseAuditModel):
"agents.Agent", related_name="alert_exclusions", blank=True
)
def __str__(self):
def __str__(self) -> str:
return self.name
def is_agent_excluded(self, agent):
def is_agent_excluded(self, agent: "Agent") -> bool:
return (
agent in self.excluded_agents.all()
or agent.site in self.excluded_sites.all()
@@ -610,7 +780,7 @@ class AlertTemplate(BaseAuditModel):
)
@staticmethod
def serialize(alert_template):
def serialize(alert_template: AlertTemplate) -> Dict[str, Any]:
# serializes the alert template and returns json
from .serializers import AlertTemplateAuditSerializer
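A minimal sketch of the enum pattern this diff applies throughout (the project's constants module is not shown here, so member names and values are assumptions based on the string literals being replaced):
from django.db import models

class DebugLogType(models.TextChoices):
    SCRIPTING = "scripting", "Scripting"
    WIN_UPDATES = "windows_updates", "Windows Updates"

# TextChoices members are str subclasses, so DebugLogType.SCRIPTING == "scripting"
# is True and existing rows that store the raw string keep matching.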

View File

@@ -1,10 +1,15 @@
from typing import TYPE_CHECKING
from django.shortcuts import get_object_or_404
from rest_framework import permissions
from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
if TYPE_CHECKING:
from accounts.models import User
def _has_perm_on_alert(user, id: int):
def _has_perm_on_alert(user: "User", id: int) -> bool:
from alerts.models import Alert
role = user.role
@@ -19,10 +24,6 @@ def _has_perm_on_alert(user, id: int):
if alert.agent:
agent_id = alert.agent.agent_id
elif alert.assigned_check:
agent_id = alert.assigned_check.agent.agent_id
elif alert.assigned_task:
agent_id = alert.assigned_task.agent.agent_id
else:
return True
@@ -30,7 +31,7 @@ def _has_perm_on_alert(user, id: int):
class AlertPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET" or r.method == "PATCH":
if "pk" in view.kwargs.keys():
return _has_perm(r, "can_list_alerts") and _has_perm_on_alert(
@@ -48,7 +49,7 @@ class AlertPerms(permissions.BasePermission):
class AlertTemplatePerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_alerttemplates")
else:
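A small sketch of the TYPE_CHECKING pattern used in this permissions file: the import exists only for static type checkers, which keeps annotations precise with no runtime or circular-import cost. The user model and lookup below are stand-ins, not the project's code.
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from django.contrib.auth.models import User  # only imported by type checkers

def has_alert_permission(user: "User", alert_id: int) -> bool:
    # stand-in logic; a real check would inspect the user's role flags
    return user.is_active and alert_id > 0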

View File

@@ -1,105 +1,19 @@
from automation.serializers import PolicySerializer
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
from rest_framework.fields import SerializerMethodField
from rest_framework.serializers import ModelSerializer, ReadOnlyField
from tacticalrmm.utils import get_default_timezone
from automation.serializers import PolicySerializer
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
from .models import Alert, AlertTemplate
class AlertSerializer(ModelSerializer):
hostname = SerializerMethodField()
agent_id = SerializerMethodField()
client = SerializerMethodField()
site = SerializerMethodField()
alert_time = SerializerMethodField()
resolve_on = SerializerMethodField()
snoozed_until = SerializerMethodField()
def get_agent_id(self, instance):
if instance.alert_type == "availability":
return instance.agent.agent_id if instance.agent else ""
elif instance.alert_type == "check":
return (
instance.assigned_check.agent.agent_id
if instance.assigned_check
else ""
)
elif instance.alert_type == "task":
return (
instance.assigned_task.agent.agent_id if instance.assigned_task else ""
)
else:
return ""
def get_hostname(self, instance):
if instance.alert_type == "availability":
return instance.agent.hostname if instance.agent else ""
elif instance.alert_type == "check":
return (
instance.assigned_check.agent.hostname
if instance.assigned_check
else ""
)
elif instance.alert_type == "task":
return (
instance.assigned_task.agent.hostname if instance.assigned_task else ""
)
else:
return ""
def get_client(self, instance):
if instance.alert_type == "availability":
return instance.agent.client.name if instance.agent else ""
elif instance.alert_type == "check":
return (
instance.assigned_check.agent.client.name
if instance.assigned_check
else ""
)
elif instance.alert_type == "task":
return (
instance.assigned_task.agent.client.name
if instance.assigned_task
else ""
)
else:
return ""
def get_site(self, instance):
if instance.alert_type == "availability":
return instance.agent.site.name if instance.agent else ""
elif instance.alert_type == "check":
return (
instance.assigned_check.agent.site.name
if instance.assigned_check
else ""
)
elif instance.alert_type == "task":
return (
instance.assigned_task.agent.site.name if instance.assigned_task else ""
)
else:
return ""
def get_alert_time(self, instance):
if instance.alert_time:
return instance.alert_time.astimezone(get_default_timezone()).timestamp()
else:
return None
def get_resolve_on(self, instance):
if instance.resolved_on:
return instance.resolved_on.astimezone(get_default_timezone()).timestamp()
else:
return None
def get_snoozed_until(self, instance):
if instance.snooze_until:
return instance.snooze_until.astimezone(get_default_timezone()).timestamp()
return None
hostname = ReadOnlyField(source="assigned_agent.hostname")
agent_id = ReadOnlyField(source="assigned_agent.agent_id")
client = ReadOnlyField(source="client.name")
site = ReadOnlyField(source="site.name")
alert_time = ReadOnlyField()
class Meta:
model = Alert
@@ -121,11 +35,11 @@ class AlertTemplateSerializer(ModelSerializer):
fields = "__all__"
def get_applied_count(self, instance):
count = 0
count += instance.policies.count()
count += instance.clients.count()
count += instance.sites.count()
return count
return (
instance.policies.count()
+ instance.clients.count()
+ instance.sites.count()
)
class AlertTemplateRelationSerializer(ModelSerializer):
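A compact sketch of the ReadOnlyField(source=...) pattern that replaces the SerializerMethodField getters above; it needs a configured Django/DRF environment to run, and the object shape is an assumption:
from types import SimpleNamespace
from rest_framework import serializers

class MiniAlertSerializer(serializers.Serializer):
    # the dotted source walks assigned_agent.hostname on the instance
    hostname = serializers.ReadOnlyField(source="assigned_agent.hostname")

alert = SimpleNamespace(assigned_agent=SimpleNamespace(hostname="DESKTOP-01"))
print(MiniAlertSerializer(alert).data)  # {'hostname': 'DESKTOP-01'}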

View File

@@ -1,12 +1,13 @@
from django.utils import timezone as djangotime
from agents.models import Agent
from tacticalrmm.celery import app
from .models import Alert
@app.task
def unsnooze_alerts() -> str:
from .models import Alert
Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
snoozed=False, snooze_until=None
)
@@ -15,10 +16,10 @@ def unsnooze_alerts() -> str:
@app.task
def cache_agents_alert_template():
from agents.models import Agent
for agent in Agent.objects.only("pk"):
def cache_agents_alert_template() -> str:
for agent in Agent.objects.only(
"pk", "site", "policy", "alert_template"
).select_related("site", "policy", "alert_template"):
agent.set_alert_template()
return "ok"
@@ -26,8 +27,6 @@ def cache_agents_alert_template():
@app.task
def prune_resolved_alerts(older_than_days: int) -> str:
from .models import Alert
Alert.objects.filter(resolved=True).filter(
alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
).delete()
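A sketch of why cache_agents_alert_template above widens only() and adds select_related() (Agent and set_alert_template come from the project, as imported at the top of this tasks module): deferring a field and then touching it costs one extra query per row, while selecting and joining it up front costs one query total.
from agents.models import Agent

# old shape: only("pk") defers everything else, so each access inside
# set_alert_template() triggers a follow-up query per agent
for agent in Agent.objects.only("pk"):
    agent.set_alert_template()

# new shape: fetch the needed columns and join the FK rows in the same query
for agent in Agent.objects.only("pk", "site", "policy", "alert_template").select_related(
    "site", "policy", "alert_template"
):
    agent.set_alert_template()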

File diff suppressed because it is too large

View File

@@ -7,7 +7,7 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from tacticalrmm.utils import notify_error
from tacticalrmm.helpers import notify_error
from .models import Alert, AlertTemplate
from .permissions import AlertPerms, AlertTemplatePerms
@@ -92,7 +92,7 @@ class GetAddAlerts(APIView):
)
alerts = (
Alert.objects.filter_by_role(request.user)
Alert.objects.filter_by_role(request.user) # type: ignore
.filter(clientFilter)
.filter(severityFilter)
.filter(resolvedFilter)
@@ -102,7 +102,7 @@ class GetAddAlerts(APIView):
return Response(AlertSerializer(alerts, many=True).data)
else:
alerts = Alert.objects.filter_by_role(request.user)
alerts = Alert.objects.filter_by_role(request.user) # type: ignore
return Response(AlertSerializer(alerts, many=True).data)
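A standalone sketch of the chained-filter idea in GetAddAlerts above: each successive .filter() call ANDs its condition onto the queryset, so optional filters can be prepared as Q objects that default to an empty Q() no-op. The field paths below are assumptions, not the project's exact schema.
from django.db.models import Q

def build_alert_filters(client_ids=None, severities=None, include_resolved=False):
    client_q = Q(agent__site__client_id__in=client_ids) if client_ids else Q()
    severity_q = Q(severity__in=severities) if severities else Q()
    resolved_q = Q() if include_resolved else Q(resolved=False)
    # a caller would apply them as qs.filter(client_q).filter(severity_q).filter(resolved_q)
    return client_q, severity_q, resolved_q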
def post(self, request):

View File

@@ -1,11 +1,7 @@
import json
import os
from autotasks.models import AutomatedTask
from django.conf import settings
from django.utils import timezone as djangotime
from model_bakery import baker
from autotasks.models import TaskResult
from tacticalrmm.test import TacticalTestCase
@@ -16,46 +12,53 @@ class TestAPIv3(TacticalTestCase):
self.agent = baker.make_recipe("agents.agent")
def test_get_checks(self):
url = f"/api/v3/{self.agent.agent_id}/checkrunner/"
agent = baker.make_recipe("agents.agent")
url = f"/api/v3/{agent.agent_id}/checkrunner/"
# add a check
check1 = baker.make_recipe("checks.ping_check", agent=self.agent)
check1 = baker.make_recipe("checks.ping_check", agent=agent)
check_result1 = baker.make(
"checks.CheckResult", agent=agent, assigned_check=check1
)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], self.agent.check_interval) # type: ignore
self.assertEqual(len(r.data["checks"]), 1) # type: ignore
self.assertEqual(r.data["check_interval"], self.agent.check_interval)
self.assertEqual(len(r.data["checks"]), 1)
# override check run interval
check2 = baker.make_recipe(
"checks.ping_check", agent=self.agent, run_interval=20
"checks.diskspace_check", agent=agent, run_interval=20
)
check_result2 = baker.make(
"checks.CheckResult", agent=agent, assigned_check=check2
)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], 20) # type: ignore
self.assertEqual(len(r.data["checks"]), 2) # type: ignore
self.assertEqual(len(r.data["checks"]), 2)
self.assertEqual(r.data["check_interval"], 20)
# Set last_run on both checks, which should return an empty list
check1.last_run = djangotime.now()
check1.save()
check2.last_run = djangotime.now()
check2.save()
check_result1.last_run = djangotime.now()
check_result1.save()
check_result2.last_run = djangotime.now()
check_result2.save()
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], 20) # type: ignore
self.assertFalse(r.data["checks"]) # type: ignore
self.assertEqual(r.data["check_interval"], 20)
self.assertFalse(r.data["checks"])
# set last_run greater than interval
check1.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
check1.save()
check2.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
check2.save()
check_result1.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
check_result1.save()
check_result2.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
check_result2.save()
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], 20) # type: ignore
self.assertEquals(len(r.data["checks"]), 2) # type: ignore
self.assertEqual(r.data["check_interval"], 20)
self.assertEqual(len(r.data["checks"]), 2)
url = "/api/v3/Maj34ACb324j234asdj2n34kASDjh34-DESKTOPTEST123/checkrunner/"
r = self.client.get(url)
@@ -63,24 +66,6 @@ class TestAPIv3(TacticalTestCase):
self.check_not_authenticated("get", url)
def test_sysinfo(self):
# TODO replace this with golang wmi sample data
url = "/api/v3/sysinfo/"
with open(
os.path.join(
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
)
) as f:
wmi_py = json.load(f)
payload = {"agent_id": self.agent.agent_id, "sysinfo": wmi_py}
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("patch", url)
def test_checkrunner_interval(self):
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
r = self.client.get(url, format="json")
@@ -130,21 +115,30 @@ class TestAPIv3(TacticalTestCase):
self.assertEqual(len(r.json()["checks"]), 15)
def test_task_runner_get(self):
from autotasks.serializers import TaskGOGetSerializer
r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/")
self.assertEqual(r.status_code, 404)
# setup data
agent = baker.make_recipe("agents.agent")
script = baker.make_recipe("scripts.script")
task = baker.make("autotasks.AutomatedTask", agent=agent, script=script)
script = baker.make("scripts.script")
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore
# setup data
task_actions = [
{"type": "cmd", "command": "whoami", "timeout": 10, "shell": "cmd"},
{
"type": "script",
"script": script.id,
"script_args": ["test"],
"timeout": 30,
},
{"type": "script", "script": 3, "script_args": [], "timeout": 30},
]
agent = baker.make_recipe("agents.agent")
task = baker.make("autotasks.AutomatedTask", agent=agent, actions=task_actions)
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(TaskGOGetSerializer(task).data, r.data) # type: ignore
def test_task_runner_results(self):
from agents.models import AgentCustomField
@@ -155,8 +149,9 @@ class TestAPIv3(TacticalTestCase):
# setup data
agent = baker.make_recipe("agents.agent")
task = baker.make("autotasks.AutomatedTask", agent=agent)
task_result = baker.make("autotasks.TaskResult", agent=agent, task=task)
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/"
# test passing task
data = {
@@ -168,7 +163,7 @@ class TestAPIv3(TacticalTestCase):
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "passing") # type: ignore
self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "passing")
# test failing task
data = {
@@ -180,7 +175,7 @@ class TestAPIv3(TacticalTestCase):
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore
self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "failing")
# test collector task
text = baker.make("core.CustomField", model="agent", type="text", name="Test")
@@ -192,8 +187,8 @@ class TestAPIv3(TacticalTestCase):
)
# test text fields
task.custom_field = text # type: ignore
task.save() # type: ignore
task.custom_field = text
task.save()
# test failing with stderr
data = {
@@ -205,7 +200,7 @@ class TestAPIv3(TacticalTestCase):
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore
self.assertTrue(TaskResult.objects.get(pk=task_result.pk).status == "failing")
# test saving to text field
data = {
@@ -217,12 +212,15 @@ class TestAPIv3(TacticalTestCase):
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=text, agent=task.agent).value, "the last line") # type: ignore
self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
self.assertEqual(
AgentCustomField.objects.get(field=text, agent=task.agent).value,
"the last line",
)
# test saving to checkbox field
task.custom_field = boolean # type: ignore
task.save() # type: ignore
task.custom_field = boolean
task.save()
data = {
"stdout": "1",
@@ -233,12 +231,14 @@ class TestAPIv3(TacticalTestCase):
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertTrue(AgentCustomField.objects.get(field=boolean, agent=task.agent).value) # type: ignore
self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
self.assertTrue(
AgentCustomField.objects.get(field=boolean, agent=task.agent).value
)
# test saving to multiple field with commas
task.custom_field = multiple # type: ignore
task.save() # type: ignore
task.custom_field = multiple
task.save()
data = {
"stdout": "this,is,an,array",
@@ -249,8 +249,11 @@ class TestAPIv3(TacticalTestCase):
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this", "is", "an", "array"]) # type: ignore
self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
self.assertEqual(
AgentCustomField.objects.get(field=multiple, agent=task.agent).value,
["this", "is", "an", "array"],
)
# test multiple with a single value
data = {
@@ -262,5 +265,8 @@ class TestAPIv3(TacticalTestCase):
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this"]) # type: ignore
self.assertEqual(TaskResult.objects.get(pk=task_result.pk).status, "passing")
self.assertEqual(
AgentCustomField.objects.get(field=multiple, agent=task.agent).value,
["this"],
)
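A minimal sketch of the model_bakery calls these tests lean on (the recipe and model names are taken from the diff itself): baker.make() persists an instance with required fields auto-filled, and baker.make_recipe() applies a recipe declared in the app's baker_recipes module.
from model_bakery import baker

def build_check_with_result():
    agent = baker.make_recipe("agents.agent")
    check = baker.make_recipe("checks.ping_check", agent=agent)
    result = baker.make("checks.CheckResult", agent=agent, assigned_check=check)
    return agent, check, result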

View File

@@ -9,7 +9,6 @@ urlpatterns = [
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
path("meshexe/", views.MeshExe.as_view()),
path("sysinfo/", views.SysInfo.as_view()),
path("newagent/", views.NewAgent.as_view()),
path("software/", views.Software.as_view()),
path("installer/", views.Installer.as_view()),

View File

@@ -1,30 +1,44 @@
import asyncio
import time
from accounts.models import User
from agents.models import Agent, AgentHistory
from agents.serializers import AgentHistorySerializer
from autotasks.models import AutomatedTask
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
from checks.models import Check
from checks.serializers import CheckRunnerGetSerializer
from core.models import CoreSettings
from core.utils import download_mesh_agent, get_mesh_device_id, get_mesh_ws_url
from django.conf import settings
from django.db.models import Prefetch
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from logs.models import DebugLog, PendingAction
from packaging import version as pyver
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from software.models import InstalledSoftware
from winupdate.models import WinUpdate, WinUpdatePolicy
from tacticalrmm.constants import MeshAgentIdent
from tacticalrmm.utils import notify_error, reload_nats
from accounts.models import User
from agents.models import Agent, AgentHistory
from agents.serializers import AgentHistorySerializer
from autotasks.models import AutomatedTask, TaskResult
from autotasks.serializers import TaskGOGetSerializer, TaskResultSerializer
from checks.constants import CHECK_DEFER, CHECK_RESULT_DEFER
from checks.models import Check, CheckResult
from checks.serializers import CheckRunnerGetSerializer
from core.utils import (
download_mesh_agent,
get_core_settings,
get_mesh_device_id,
get_mesh_ws_url,
)
from logs.models import DebugLog, PendingAction
from software.models import InstalledSoftware
from tacticalrmm.constants import (
AGENT_DEFER,
AuditActionType,
AuditObjType,
CheckStatus,
DebugLogType,
MeshAgentIdent,
PAStatus,
)
from tacticalrmm.helpers import notify_error
from tacticalrmm.utils import reload_nats
from winupdate.models import WinUpdate, WinUpdatePolicy
class CheckIn(APIView):
@@ -34,11 +48,12 @@ class CheckIn(APIView):
# called once during tacticalagent windows service startup
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
if not agent.choco_installed:
asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
time.sleep(0.5)
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
return Response("ok")
@@ -48,7 +63,9 @@ class SyncMeshNodeID(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
if agent.mesh_node_id != request.data["nodeid"]:
agent.mesh_node_id = request.data["nodeid"]
agent.save(update_fields=["mesh_node_id"])
@@ -61,7 +78,9 @@ class Choco(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
agent.choco_installed = request.data["installed"]
agent.save(update_fields=["choco_installed"])
return Response("ok")
@@ -72,7 +91,9 @@ class WinUpdates(APIView):
permission_classes = [IsAuthenticated]
def put(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
needs_reboot: bool = request.data["needs_reboot"]
agent.needs_reboot = needs_reboot
@@ -90,7 +111,7 @@ class WinUpdates(APIView):
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
DebugLog.info(
agent=agent,
log_type="windows_updates",
log_type=DebugLogType.WIN_UPDATES,
message=f"{agent.hostname} is rebooting after updates were installed.",
)
@@ -98,8 +119,13 @@ class WinUpdates(APIView):
return Response("ok")
def patch(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
u = agent.winupdates.filter(guid=request.data["guid"]).last() # type: ignore
if not u:
raise WinUpdate.DoesNotExist
success: bool = request.data["success"]
if success:
u.result = "success"
@@ -122,8 +148,14 @@ class WinUpdates(APIView):
return Response("ok")
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
updates = request.data["wua_updates"]
if not updates:
return notify_error("Empty payload")
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
for update in updates:
if agent.winupdates.filter(guid=update["guid"]).exists(): # type: ignore
u = agent.winupdates.filter(guid=update["guid"]).last() # type: ignore
@@ -162,7 +194,9 @@ class SupersededWinUpdate(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
updates = agent.winupdates.filter(guid=request.data["guid"]) # type: ignore
for u in updates:
u.delete()
@@ -175,12 +209,19 @@ class RunChecks(APIView):
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER).prefetch_related(
Prefetch("agentchecks", queryset=Check.objects.select_related("script"))
),
agent_id=agentid,
)
checks = agent.get_checks_with_policies(exclude_overridden=True)
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializer(checks, many=True).data,
"checks": CheckRunnerGetSerializer(
checks, context={"agent": agent}, many=True
).data,
}
return Response(ret)
@@ -190,45 +231,72 @@ class CheckRunner(APIView):
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
checks = agent.agentchecks.filter(overriden_by_policy=False) # type: ignore
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER).prefetch_related(
Prefetch("agentchecks", queryset=Check.objects.select_related("script"))
),
agent_id=agentid,
)
checks = agent.get_checks_with_policies(exclude_overridden=True)
run_list = [
check
for check in checks
# always run if check hasn't run yet
if not check.last_run
# if a check interval is set, see if the correct amount of seconds have passed
if not isinstance(check.check_result, CheckResult)
or not check.check_result.last_run
# see if the correct number of seconds has passed
or (
check.run_interval
and (
check.last_run
< djangotime.now()
- djangotime.timedelta(seconds=check.run_interval)
check.check_result.last_run
< djangotime.now()
- djangotime.timedelta(
seconds=check.run_interval
if check.run_interval
else agent.check_interval
)
)
# if check interval isn't set, make sure the agent's check interval has passed before running
or (
not check.run_interval
and check.last_run
< djangotime.now() - djangotime.timedelta(seconds=agent.check_interval)
)
]
ret = {
"agent": agent.pk,
"check_interval": agent.check_run_interval(),
"checks": CheckRunnerGetSerializer(run_list, many=True).data,
"checks": CheckRunnerGetSerializer(
run_list, context={"agent": agent}, many=True
).data,
}
return Response(ret)
def patch(self, request):
check = get_object_or_404(Check, pk=request.data["id"])
if "agent_id" not in request.data.keys():
return notify_error("Agent upgrade required")
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
status = check.handle_check(request.data)
if status == "failing" and check.assignedtask.exists(): # type: ignore
check.handle_assigned_task()
check = get_object_or_404(
Check.objects.defer(*CHECK_DEFER),
pk=request.data["id"],
)
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER), agent_id=request.data["agent_id"]
)
# get check result or create if doesn't exist
check_result, created = CheckResult.objects.defer(
*CHECK_RESULT_DEFER
).get_or_create(
assigned_check=check,
agent=agent,
)
if created:
check_result.save()
status = check_result.handle_check(request.data, check, agent)
if status == CheckStatus.FAILING and check.assignedtasks.exists():
for task in check.assignedtasks.all():
if task.enabled:
if task.policy:
task.run_win_task(agent)
else:
task.run_win_task()
return Response("ok")
@@ -238,7 +306,10 @@ class CheckRunnerInterval(APIView):
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER).prefetch_related("agentchecks"),
agent_id=agentid,
)
return Response(
{"agent": agent.pk, "check_interval": agent.check_run_interval()}
@@ -250,65 +321,71 @@ class TaskRunner(APIView):
permission_classes = [IsAuthenticated]
def get(self, request, pk, agentid):
_ = get_object_or_404(Agent, agent_id=agentid)
agent = get_object_or_404(Agent.objects.defer(*AGENT_DEFER), agent_id=agentid)
task = get_object_or_404(AutomatedTask, pk=pk)
return Response(TaskGOGetSerializer(task).data)
return Response(TaskGOGetSerializer(task, context={"agent": agent}).data)
def patch(self, request, pk, agentid):
from alerts.models import Alert
agent = get_object_or_404(Agent, agent_id=agentid)
task = get_object_or_404(AutomatedTask, pk=pk)
serializer = TaskRunnerPatchSerializer(
instance=task, data=request.data, partial=True
agent = get_object_or_404(
Agent.objects.defer(*AGENT_DEFER),
agent_id=agentid,
)
task = get_object_or_404(
AutomatedTask.objects.select_related("custom_field"), pk=pk
)
# get task result or create if doesn't exist
try:
task_result = (
TaskResult.objects.select_related("agent")
.defer("agent__services", "agent__wmi_detail")
.get(task=task, agent=agent)
)
serializer = TaskResultSerializer(
data=request.data, instance=task_result, partial=True
)
except TaskResult.DoesNotExist:
serializer = TaskResultSerializer(data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
new_task = serializer.save(last_run=djangotime.now())
task_result = serializer.save(last_run=djangotime.now())
AgentHistory.objects.create(
agent=agent,
type="task_run",
script=task.script,
type=AuditActionType.TASK_RUN,
command=task.name,
script_results=request.data,
)
# check if task is a collector and update the custom field
if task.custom_field:
if not task.stderr:
if not task_result.stderr:
task.save_collector_results()
task_result.save_collector_results()
status = "passing"
status = CheckStatus.PASSING
else:
status = "failing"
status = CheckStatus.FAILING
else:
status = "failing" if task.retcode != 0 else "passing"
status = (
CheckStatus.FAILING if task_result.retcode != 0 else CheckStatus.PASSING
)
new_task.status = status
new_task.save()
if status == "passing":
if Alert.objects.filter(assigned_task=new_task, resolved=False).exists():
Alert.handle_alert_resolve(new_task)
if task_result:
task_result.status = status
task_result.save(update_fields=["status"])
else:
Alert.handle_alert_failure(new_task)
task_result.status = status
task.save(update_fields=["status"])
return Response("ok")
if status == CheckStatus.PASSING:
if Alert.create_or_return_task_alert(task, agent=agent, skip_create=True):
Alert.handle_alert_resolve(task_result)
else:
Alert.handle_alert_failure(task_result)
class SysInfo(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def patch(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
if not isinstance(request.data["sysinfo"], dict):
return notify_error("err")
agent.wmi_detail = request.data["sysinfo"]
agent.save(update_fields=["wmi_detail"])
return Response("ok")
@@ -324,7 +401,7 @@ class MeshExe(APIView):
case _:
return notify_error("Arch not specified")
core: CoreSettings = CoreSettings.objects.first() # type: ignore
core = get_core_settings()
try:
uri = get_mesh_ws_url()
@@ -388,8 +465,8 @@ class NewAgent(APIView):
AuditLog.objects.create(
username=request.user,
agent=agent.hostname,
object_type="agent",
action="agent_install",
object_type=AuditObjType.AGENT,
action=AuditActionType.AGENT_INSTALL,
message=f"{request.user} installed new agent {agent.hostname}",
after_value=Agent.serialize(agent),
debug_info={"ip": request._client_ip},
@@ -461,7 +538,7 @@ class ChocoResult(APIView):
action.details["output"] = results
action.details["installed"] = installed
action.status = "completed"
action.status = PAStatus.COMPLETED
action.save(update_fields=["details", "status"])
return Response("ok")
@@ -471,8 +548,9 @@ class AgentHistoryResult(APIView):
permission_classes = [IsAuthenticated]
def patch(self, request, agentid, pk):
_ = get_object_or_404(Agent, agent_id=agentid)
hist = get_object_or_404(AgentHistory, pk=pk)
hist = get_object_or_404(
AgentHistory.objects.filter(agent__agent_id=agentid), pk=pk
)
s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
s.is_valid(raise_exception=True)
s.save()
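A sketch of the defer()/Prefetch() combination the endpoints above switch to (AGENT_DEFER itself is a project constant not shown in this diff; the deferred field names below are examples): defer() skips loading wide columns such as wmi_detail, and Prefetch() lets the related checks arrive with their script already joined.
from django.db.models import Prefetch
from django.shortcuts import get_object_or_404

from agents.models import Agent
from checks.models import Check

def load_agent_for_checkrunner(agent_id: str) -> Agent:
    return get_object_or_404(
        Agent.objects.defer("services", "wmi_detail").prefetch_related(
            Prefetch("agentchecks", queryset=Check.objects.select_related("script"))
        ),
        agent_id=agent_id,
    )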

View File

@@ -1,7 +1,16 @@
from agents.models import Agent
from core.models import CoreSettings
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from django.core.cache import cache
from django.db import models
from agents.models import Agent
from clients.models import Client, Site
from logs.models import BaseAuditModel
from tacticalrmm.constants import CORESETTINGS_CACHE_KEY, CheckType
if TYPE_CHECKING:
from autotasks.models import AutomatedTask
from checks.models import Check
class Policy(BaseAuditModel):
@@ -26,119 +35,186 @@ class Policy(BaseAuditModel):
"agents.Agent", related_name="policy_exclusions", blank=True
)
def save(self, *args, **kwargs):
def save(self, *args: Any, **kwargs: Any) -> None:
from alerts.tasks import cache_agents_alert_template
from automation.tasks import generate_agent_checks_task
# get old policy if exists
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
old_policy: Optional[Policy] = (
type(self).objects.get(pk=self.pk) if self.pk else None
)
super(Policy, self).save(old_model=old_policy, *args, **kwargs)
# generate agent checks only if active and enforced were changed
# check if alert template was changed and cache on agents
if old_policy:
if old_policy.active != self.active or old_policy.enforced != self.enforced:
generate_agent_checks_task.delay(
policy=self.pk,
create_tasks=True,
)
if old_policy.alert_template != self.alert_template:
cache_agents_alert_template.delay()
elif self.alert_template and old_policy.active != self.active:
cache_agents_alert_template.delay()
if old_policy.active != self.active or old_policy.enforced != self.enforced:
cache.delete(CORESETTINGS_CACHE_KEY)
cache.delete_many_pattern("site_workstation_*")
cache.delete_many_pattern("site_server_*")
cache.delete_many_pattern("agent_*")
def delete(self, *args, **kwargs):
from automation.tasks import generate_agent_checks_task
cache.delete(CORESETTINGS_CACHE_KEY)
cache.delete_many_pattern("site_workstation_*")
cache.delete_many_pattern("site_server_*")
cache.delete_many_pattern("agent_*")
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
super(Policy, self).delete(*args, **kwargs)
super(Policy, self).delete(
*args,
**kwargs,
)
generate_agent_checks_task.delay(agents=agents, create_tasks=True)
def __str__(self):
def __str__(self) -> str:
return self.name
@property
def is_default_server_policy(self):
return self.default_server_policy.exists() # type: ignore
def is_default_server_policy(self) -> bool:
return self.default_server_policy.exists()
@property
def is_default_workstation_policy(self):
return self.default_workstation_policy.exists() # type: ignore
def is_default_workstation_policy(self) -> bool:
return self.default_workstation_policy.exists()
def is_agent_excluded(self, agent):
def is_agent_excluded(self, agent: "Agent") -> bool:
return (
agent in self.excluded_agents.all()
or agent.site in self.excluded_sites.all()
or agent.client in self.excluded_clients.all()
)
def related_agents(self):
return self.get_related("server") | self.get_related("workstation")
def related_agents(
self, mon_type: Optional[str] = None
) -> "models.QuerySet[Agent]":
models.prefetch_related_objects(
[self],
"excluded_agents",
"excluded_sites",
"excluded_clients",
"workstation_clients",
"server_clients",
"workstation_sites",
"server_sites",
"agents",
)
def get_related(self, mon_type):
explicit_agents = (
self.agents.filter(monitoring_type=mon_type) # type: ignore
.exclude(
pk__in=self.excluded_agents.only("pk").values_list("pk", flat=True)
agent_filter = {}
filtered_agents_ids = Agent.objects.none()
if mon_type:
agent_filter["monitoring_type"] = mon_type
excluded_clients_ids = self.excluded_clients.only("pk").values_list(
"id", flat=True
)
excluded_sites_ids = self.excluded_sites.only("pk").values_list("id", flat=True)
excluded_agents_ids = self.excluded_agents.only("pk").values_list(
"id", flat=True
)
if self.is_default_server_policy:
filtered_agents_ids |= (
Agent.objects.exclude(block_policy_inheritance=True)
.exclude(site__block_policy_inheritance=True)
.exclude(site__client__block_policy_inheritance=True)
.exclude(id__in=excluded_agents_ids)
.exclude(site_id__in=excluded_sites_ids)
.exclude(site__client_id__in=excluded_clients_ids)
.filter(monitoring_type="server")
.only("id")
.values_list("id", flat=True)
)
.exclude(site__in=self.excluded_sites.all())
.exclude(site__client__in=self.excluded_clients.all())
if self.is_default_workstation_policy:
filtered_agents_ids |= (
Agent.objects.exclude(block_policy_inheritance=True)
.exclude(site__block_policy_inheritance=True)
.exclude(site__client__block_policy_inheritance=True)
.exclude(id__in=excluded_agents_ids)
.exclude(site_id__in=excluded_sites_ids)
.exclude(site__client_id__in=excluded_clients_ids)
.filter(monitoring_type="workstation")
.only("id")
.values_list("id", flat=True)
)
# if this is the default policy for servers and workstations, skip the other calculations
if self.is_default_server_policy and self.is_default_workstation_policy:
return Agent.objects.filter(models.Q(id__in=filtered_agents_ids))
explicit_agents = (
self.agents.filter(**agent_filter) # type: ignore
.exclude(id__in=excluded_agents_ids)
.exclude(site_id__in=excluded_sites_ids)
.exclude(site__client_id__in=excluded_clients_ids)
)
explicit_clients = getattr(self, f"{mon_type}_clients").exclude(
pk__in=self.excluded_clients.all()
)
explicit_sites = getattr(self, f"{mon_type}_sites").exclude(
pk__in=self.excluded_sites.all()
)
explicit_clients_qs = Client.objects.none()
explicit_sites_qs = Site.objects.none()
filtered_agents_pks = Policy.objects.none()
if not mon_type or mon_type == "workstation":
explicit_clients_qs |= self.workstation_clients.exclude( # type: ignore
id__in=excluded_clients_ids
)
explicit_sites_qs |= self.workstation_sites.exclude( # type: ignore
id__in=excluded_sites_ids
)
filtered_agents_pks |= (
if not mon_type or mon_type == "server":
explicit_clients_qs |= self.server_clients.exclude( # type: ignore
id__in=excluded_clients_ids
)
explicit_sites_qs |= self.server_sites.exclude( # type: ignore
id__in=excluded_sites_ids
)
filtered_agents_ids |= (
Agent.objects.exclude(block_policy_inheritance=True)
.filter(
site__in=[
site
for site in explicit_sites
if site.client not in explicit_clients
and site.client not in self.excluded_clients.all()
site_id__in=[
site.id
for site in explicit_sites_qs
if site.client not in explicit_clients_qs
and site.client.id not in excluded_clients_ids
],
monitoring_type=mon_type,
**agent_filter,
)
.values_list("pk", flat=True)
.only("id")
.values_list("id", flat=True)
)
filtered_agents_pks |= (
filtered_agents_ids |= (
Agent.objects.exclude(block_policy_inheritance=True)
.exclude(site__block_policy_inheritance=True)
.filter(
site__client__in=[client for client in explicit_clients],
monitoring_type=mon_type,
site__client__in=explicit_clients_qs,
**agent_filter,
)
.values_list("pk", flat=True)
.only("id")
.values_list("id", flat=True)
)
return Agent.objects.filter(
models.Q(pk__in=filtered_agents_pks)
| models.Q(pk__in=explicit_agents.only("pk"))
models.Q(id__in=filtered_agents_ids)
| models.Q(id__in=explicit_agents.only("id"))
)
@staticmethod
def serialize(policy):
def serialize(policy: "Policy") -> Dict[str, Any]:
# serializes the policy and returns json
from .serializers import PolicyAuditSerializer
return PolicyAuditSerializer(policy).data
@staticmethod
def cascade_policy_tasks(agent):
def get_policy_tasks(agent: "Agent") -> "List[AutomatedTask]":
# List of all tasks to be applied
tasks = list()
agent_tasks_parent_pks = [
task.parent_task for task in agent.autotasks.filter(managed_by_policy=True)
]
# Get policies applied to agent and agent site and client
policies = agent.get_agent_policies()
@@ -150,36 +226,13 @@ class Policy(BaseAuditModel):
for task in policy.autotasks.all():
tasks.append(task)
# remove policy tasks from agent not included in policy
for task in agent.autotasks.filter(
parent_task__in=[
taskpk
for taskpk in agent_tasks_parent_pks
if taskpk not in [task.pk for task in tasks]
]
):
if task.sync_status == "initial":
task.delete()
else:
task.sync_status = "pendingdeletion"
task.save()
# change tasks from pendingdeletion to notsynced if policy was added or changed
agent.autotasks.filter(sync_status="pendingdeletion").filter(
parent_task__in=[taskpk for taskpk in [task.pk for task in tasks]]
).update(sync_status="notsynced")
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
return tasks
@staticmethod
def cascade_policy_checks(agent):
# Get checks added to agent directly
agent_checks = list(agent.agentchecks.filter(managed_by_policy=False))
def get_policy_checks(agent: "Agent") -> "List[Check]":
agent_checks_parent_pks = [
check.parent_check
for check in agent.agentchecks.filter(managed_by_policy=True)
]
# Get checks added to agent directly
agent_checks = list(agent.agentchecks.all())
# Get policies applied to agent and agent site and client
policies = agent.get_agent_policies()
@@ -201,71 +254,72 @@ class Policy(BaseAuditModel):
for check in policy.policychecks.all():
policy_checks.append(check)
# Sorted Checks already added
added_diskspace_checks = list()
added_ping_checks = list()
added_winsvc_checks = list()
added_script_checks = list()
added_eventlog_checks = list()
added_cpuload_checks = list()
added_memory_checks = list()
if not enforced_checks and not policy_checks:
return []
# Lists all agent and policy checks that will be created
diskspace_checks = list()
ping_checks = list()
winsvc_checks = list()
script_checks = list()
eventlog_checks = list()
cpuload_checks = list()
memory_checks = list()
# Sorted Checks already added
added_diskspace_checks: List[str] = list()
added_ping_checks: List[str] = list()
added_winsvc_checks: List[str] = list()
added_script_checks: List[int] = list()
added_eventlog_checks: List[List[str]] = list()
added_cpuload_checks: List[int] = list()
added_memory_checks: List[int] = list()
# Lists all agent and policy checks that will be returned
diskspace_checks: "List[Check]" = list()
ping_checks: "List[Check]" = list()
winsvc_checks: "List[Check]" = list()
script_checks: "List[Check]" = list()
eventlog_checks: "List[Check]" = list()
cpuload_checks: "List[Check]" = list()
memory_checks: "List[Check]" = list()
overridden_checks: List[int] = list()
# Loop over checks with enforced policies first, then non-enforced policies
for check in enforced_checks + agent_checks + policy_checks:
if check.check_type == "diskspace" and agent.plat == "windows":
if check.check_type == CheckType.DISK_SPACE and agent.plat == "windows":
# Check if drive letter was already added
if check.disk not in added_diskspace_checks:
added_diskspace_checks.append(check.disk)
# Dont create the check if it is an agent check
# Dont add the check if it is an agent check
if not check.agent:
diskspace_checks.append(check)
elif check.agent:
check.overriden_by_policy = True
check.save()
overridden_checks.append(check.pk)
if check.check_type == "ping":
elif check.check_type == CheckType.PING:
# Check if IP/host was already added
if check.ip not in added_ping_checks:
added_ping_checks.append(check.ip)
# Dont create the check if it is an agent check
# Dont add the check if it is an agent check
if not check.agent:
ping_checks.append(check)
elif check.agent:
check.overriden_by_policy = True
check.save()
overridden_checks.append(check.pk)
if check.check_type == "cpuload" and agent.plat == "windows":
elif check.check_type == CheckType.CPU_LOAD and agent.plat == "windows":
# Check if cpuload list is empty
if not added_cpuload_checks:
added_cpuload_checks.append(check)
added_cpuload_checks.append(check.pk)
# Dont create the check if it is an agent check
if not check.agent:
cpuload_checks.append(check)
elif check.agent:
check.overriden_by_policy = True
check.save()
overridden_checks.append(check.pk)
if check.check_type == "memory" and agent.plat == "windows":
elif check.check_type == CheckType.MEMORY and agent.plat == "windows":
# Check if memory check list is empty
if not added_memory_checks:
added_memory_checks.append(check)
added_memory_checks.append(check.pk)
# Dont create the check if it is an agent check
if not check.agent:
memory_checks.append(check)
elif check.agent:
check.overriden_by_policy = True
check.save()
overridden_checks.append(check.pk)
if check.check_type == "winsvc" and agent.plat == "windows":
elif check.check_type == CheckType.WINSVC and agent.plat == "windows":
# Check if service name was already added
if check.svc_name not in added_winsvc_checks:
added_winsvc_checks.append(check.svc_name)
@@ -273,10 +327,9 @@ class Policy(BaseAuditModel):
if not check.agent:
winsvc_checks.append(check)
elif check.agent:
check.overriden_by_policy = True
check.save()
overridden_checks.append(check.pk)
if check.check_type == "script" and agent.is_supported_script(
elif check.check_type == CheckType.SCRIPT and agent.is_supported_script(
check.script.supported_platforms
):
# Check if script id was already added
@@ -286,20 +339,25 @@ class Policy(BaseAuditModel):
if not check.agent:
script_checks.append(check)
elif check.agent:
check.overriden_by_policy = True
check.save()
overridden_checks.append(check.pk)
if check.check_type == "eventlog" and agent.plat == "windows":
elif check.check_type == CheckType.EVENT_LOG and agent.plat == "windows":
# Check if events were already added
if [check.log_name, check.event_id] not in added_eventlog_checks:
added_eventlog_checks.append([check.log_name, check.event_id])
if not check.agent:
eventlog_checks.append(check)
elif check.agent:
check.overriden_by_policy = True
check.save()
overridden_checks.append(check.pk)
final_list = (
if overridden_checks:
from checks.models import Check
Check.objects.filter(pk__in=overridden_checks).update(
overridden_by_policy=True
)
return (
diskspace_checks
+ ping_checks
+ cpuload_checks
@@ -308,33 +366,3 @@ class Policy(BaseAuditModel):
+ script_checks
+ eventlog_checks
)
# remove policy checks from agent that fell out of policy scope
agent.agentchecks.filter(
managed_by_policy=True,
parent_check__in=[
checkpk
for checkpk in agent_checks_parent_pks
if checkpk not in [check.pk for check in final_list]
],
).delete()
return [
check for check in final_list if check.pk not in agent_checks_parent_pks
]
@staticmethod
def generate_policy_checks(agent):
checks = Policy.cascade_policy_checks(agent)
if checks:
for check in checks:
check.create_policy_check(agent)
@staticmethod
def generate_policy_tasks(agent):
tasks = Policy.cascade_policy_tasks(agent)
if tasks:
for task in tasks:
task.create_policy_task(agent)
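A small sketch of models.prefetch_related_objects as used at the top of related_agents() above: it warms the prefetch cache on instances that are already in memory, so the repeated excluded_* accesses later in the method are served without extra queries. The relation names are the ones from the diff.
from django.db.models import prefetch_related_objects

def warm_policy_relations(policy) -> None:
    prefetch_related_objects(
        [policy],            # takes an iterable of model instances, not a queryset
        "excluded_agents",
        "excluded_sites",
        "excluded_clients",
    )
    # subsequent policy.excluded_agents.all() etc. now hit the prefetch cache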

View File

@@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm
class AutomationPolicyPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_automation_policies")
else:

View File

@@ -1,13 +1,14 @@
from agents.serializers import AgentHostnameSerializer
from autotasks.models import AutomatedTask
from checks.models import Check
from clients.models import Client
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
from rest_framework.serializers import (
ModelSerializer,
ReadOnlyField,
SerializerMethodField,
)
from agents.serializers import AgentHostnameSerializer
from autotasks.models import TaskResult
from checks.models import CheckResult
from clients.models import Client
from clients.serializers import ClientMinimumSerializer, SiteMinimumSerializer
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Policy
@@ -95,7 +96,7 @@ class PolicyCheckStatusSerializer(ModelSerializer):
hostname = ReadOnlyField(source="agent.hostname")
class Meta:
model = Check
model = CheckResult
fields = "__all__"
@@ -103,7 +104,7 @@ class PolicyTaskStatusSerializer(ModelSerializer):
hostname = ReadOnlyField(source="agent.hostname")
class Meta:
model = AutomatedTask
model = TaskResult
fields = "__all__"

View File

@@ -1,155 +1,20 @@
from typing import Any, Dict, List, Union
from tacticalrmm.celery import app
@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
def generate_agent_checks_task(
policy: int = None,
site: int = None,
client: int = None,
agents: List[int] = list(),
all: bool = False,
create_tasks: bool = False,
) -> Union[str, None]:
from agents.models import Agent
from automation.models import Policy
p = Policy.objects.get(pk=policy) if policy else None
# generate checks on all agents if all is specified or if policy is default server/workstation policy
if (p and p.is_default_server_policy and p.is_default_workstation_policy) or all:
a = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
# generate checks on all servers if policy is a default servers policy
elif p and p.is_default_server_policy:
a = Agent.objects.filter(monitoring_type="server").only("pk", "monitoring_type")
# generate checks on all workstations if policy is a default workstations policy
elif p and p.is_default_workstation_policy:
a = Agent.objects.filter(monitoring_type="workstation").only(
"pk", "monitoring_type"
)
# generate checks on a list of supplied agents
elif agents:
a = Agent.objects.filter(pk__in=agents)
# generate checks on agents affected by supplied policy
elif policy:
a = p.related_agents().only("pk")
# generate checks for agents at the specified site
elif site:
a = Agent.objects.filter(site_id=site)
# generate checks for agents under the specified client
elif client:
a = Agent.objects.filter(site__client_id=client)
else:
a = []
for agent in a:
agent.generate_checks_from_policies()
if create_tasks:
agent.generate_tasks_from_policies()
agent.set_alert_template()
return "ok"
@app.task(
acks_late=True, retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5}
)
# updates policy managed check fields on agents
def update_policy_check_fields_task(check: int) -> str:
from checks.models import Check
c: Check = Check.objects.get(pk=check)
update_fields: Dict[Any, Any] = {}
for field in c.policy_fields_to_copy:
update_fields[field] = getattr(c, field)
Check.objects.filter(parent_check=check).update(**update_fields)
return "ok"
@app.task(retry_backoff=5, retry_jitter=True, retry_kwargs={"max_retries": 5})
# generates policy tasks on agents affected by a policy
def generate_agent_autotasks_task(policy: int = None) -> str:
from agents.models import Agent
from automation.models import Policy
p: Policy = Policy.objects.get(pk=policy)
if p and p.is_default_server_policy and p.is_default_workstation_policy:
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
elif p and p.is_default_server_policy:
agents = Agent.objects.filter(monitoring_type="server").only(
"pk", "monitoring_type"
)
elif p and p.is_default_workstation_policy:
agents = Agent.objects.filter(monitoring_type="workstation").only(
"pk", "monitoring_type"
)
else:
agents = p.related_agents().only("pk")
for agent in agents:
agent.generate_tasks_from_policies()
return "ok"
@app.task(
acks_late=True,
retry_backoff=5,
retry_jitter=True,
retry_kwargs={"max_retries": 5},
)
def delete_policy_autotasks_task(task: int) -> str:
from autotasks.models import AutomatedTask
for t in AutomatedTask.objects.filter(parent_task=task):
t.delete_task_on_agent()
return "ok"
@app.task
def run_win_policy_autotasks_task(task: int) -> str:
from autotasks.models import AutomatedTask
for t in AutomatedTask.objects.filter(parent_task=task):
t.run_win_task()
return "ok"
@app.task(
acks_late=True,
retry_backoff=5,
retry_jitter=True,
retry_kwargs={"max_retries": 5},
)
def update_policy_autotasks_fields_task(task: int, update_agent: bool = False) -> str:
from autotasks.models import AutomatedTask
t = AutomatedTask.objects.get(pk=task)
update_fields: Dict[str, Any] = {}
for field in t.policy_fields_to_copy:
update_fields[field] = getattr(t, field)
AutomatedTask.objects.filter(parent_task=task).update(**update_fields)
if update_agent:
for t in AutomatedTask.objects.filter(parent_task=task).exclude(
sync_status="initial"
):
t.modify_task_on_agent()
try:
policy_task = AutomatedTask.objects.get(pk=task)
except AutomatedTask.DoesNotExist:
return "AutomatedTask not found"
if not policy_task.policy:
return "AutomatedTask must be a policy"
# get related agents from policy
for agent in policy_task.policy.related_agents():
policy_task.run_win_task(agent)
return "ok"

File diff suppressed because it is too large

View File

@@ -1,6 +1,7 @@
from django.urls import path
from autotasks.views import GetAddAutoTasks
from checks.views import GetAddChecks
from django.urls import path
from . import views
@@ -9,7 +10,6 @@ urlpatterns = [
path("policies/<int:pk>/related/", views.GetRelated.as_view()),
path("policies/overview/", views.OverviewPolicy.as_view()),
path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
path("sync/", views.PolicySync.as_view()),
# alias to get policy checks
path("policies/<int:policy>/checks/", GetAddChecks.as_view()),
# alias to get policy tasks

View File

@@ -1,18 +1,17 @@
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
from clients.models import Client
from django.shortcuts import get_object_or_404
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from agents.models import Agent
from autotasks.models import TaskResult
from checks.models import CheckResult
from clients.models import Client
from tacticalrmm.permissions import _has_perm_on_client, _has_perm_on_site
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer
from tacticalrmm.permissions import _has_perm_on_client, _has_perm_on_site
from tacticalrmm.utils import notify_error
from .models import Policy
from .permissions import AutomationPolicyPerms
from .serializers import (
@@ -29,7 +28,9 @@ class GetAddPolicies(APIView):
permission_classes = [IsAuthenticated, AutomationPolicyPerms]
def get(self, request):
policies = Policy.objects.all()
policies = Policy.objects.select_related("alert_template").prefetch_related(
"excluded_agents", "excluded_sites", "excluded_clients"
)
return Response(
PolicyTableSerializer(
@@ -51,9 +52,9 @@ class GetAddPolicies(APIView):
check.create_policy_check(policy=policy)
tasks = copyPolicy.autotasks.all()
for task in tasks:
task.create_policy_task(policy=policy)
if not task.assigned_check:
task.create_policy_task(policy=policy)
return Response("ok")
@@ -67,22 +68,12 @@ class GetUpdateDeletePolicy(APIView):
return Response(PolicySerializer(policy).data)
def put(self, request, pk):
from .tasks import generate_agent_checks_task
policy = get_object_or_404(Policy, pk=pk)
serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
# check for excluded objects in the request and, if present, regenerate agent policies
if (
"excluded_sites" in request.data.keys()
or "excluded_clients" in request.data.keys()
or "excluded_agents" in request.data.keys()
):
generate_agent_checks_task.delay(policy=pk, create_tasks=True)
return Response("ok")
def delete(self, request, pk):
@@ -91,25 +82,11 @@ class GetUpdateDeletePolicy(APIView):
return Response("ok")
class PolicySync(APIView):
def post(self, request):
if "policy" in request.data.keys():
from automation.tasks import generate_agent_checks_task
generate_agent_checks_task.delay(
policy=request.data["policy"], create_tasks=True
)
return Response("ok")
else:
return notify_error("The request was invalid")
class PolicyAutoTask(APIView):
# get status of all tasks
def get(self, request, task):
tasks = AutomatedTask.objects.filter(parent_task=task)
tasks = TaskResult.objects.filter(task=task)
return Response(PolicyTaskStatusSerializer(tasks, many=True).data)
# bulk run win tasks associated with policy
@@ -124,7 +101,7 @@ class PolicyCheck(APIView):
permission_classes = [IsAuthenticated, AutomationPolicyPerms]
def get(self, request, check):
checks = Check.objects.filter(parent_check=check)
checks = CheckResult.objects.filter(assigned_check=check)
return Response(PolicyCheckStatusSerializer(checks, many=True).data)
@@ -162,7 +139,7 @@ class UpdatePatchPolicy(APIView):
serializer = WinUpdatePolicySerializer(data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.policy = policy # type: ignore
serializer.policy = policy
serializer.save()
return Response("ok")
@@ -195,7 +172,7 @@ class ResetPatchPolicy(APIView):
raise PermissionDenied()
agents = (
Agent.objects.filter_by_role(request.user)
Agent.objects.filter_by_role(request.user) # type: ignore
.prefetch_related("winupdatepolicy")
.filter(site__client_id=request.data["client"])
)
@@ -204,13 +181,13 @@ class ResetPatchPolicy(APIView):
raise PermissionDenied()
agents = (
Agent.objects.filter_by_role(request.user)
Agent.objects.filter_by_role(request.user) # type: ignore
.prefetch_related("winupdatepolicy")
.filter(site_id=request.data["site"])
)
else:
agents = (
Agent.objects.filter_by_role(request.user)
Agent.objects.filter_by_role(request.user) # type: ignore
.prefetch_related("winupdatepolicy")
.only("pk")
)
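
The GetAddPolicies change above replaces Policy.objects.all() with an eager-loading queryset so PolicyTableSerializer does not issue one query per policy for its alert template and exclusion lists; a short annotated sketch of the same query shape:

# Illustrative only: the eager-loading pattern used in GetAddPolicies above.
from automation.models import Policy

policies = (
    Policy.objects.select_related("alert_template")  # to-one FK: pulled in via a SQL join
    .prefetch_related(
        # to-many relations: one batched query each instead of one per policy row
        "excluded_agents",
        "excluded_sites",
        "excluded_clients",
    )
)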

View File

@@ -1,5 +1,6 @@
from django.contrib import admin
from .models import AutomatedTask
from .models import AutomatedTask, TaskResult
admin.site.register(AutomatedTask)
admin.site.register(TaskResult)

View File

@@ -1,10 +1,5 @@
from itertools import cycle
from model_bakery.recipe import Recipe, foreign_key, seq
script = Recipe("scripts.script")
from model_bakery.recipe import Recipe
task = Recipe(
"autotasks.AutomatedTask",
script=foreign_key(script),
)

View File

@@ -1,16 +1,13 @@
from agents.models import Agent
from autotasks.tasks import remove_orphaned_win_tasks
from django.core.management.base import BaseCommand
from autotasks.tasks import remove_orphaned_win_tasks
class Command(BaseCommand):
help = "Checks for orphaned tasks on all agents and removes them"
def handle(self, *args, **kwargs):
agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time")
online = [i for i in agents if i.status == "online"]
for agent in online:
remove_orphaned_win_tasks.delay(agent.pk)
remove_orphaned_win_tasks.s()
self.stdout.write(
self.style.SUCCESS(

View File

@@ -0,0 +1,99 @@
# Generated by Django 3.2.12 on 2022-04-01 22:44
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("checks", "0025_auto_20210917_1954"),
("agents", "0046_alter_agenthistory_command"),
("autotasks", "0029_alter_automatedtask_task_type"),
]
operations = [
migrations.RemoveField(
model_name="automatedtask",
name="retvalue",
),
migrations.AlterField(
model_name="automatedtask",
name="assigned_check",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="assignedtasks",
to="checks.check",
),
),
migrations.AlterField(
model_name="automatedtask",
name="win_task_name",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.CreateModel(
name="TaskResult",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("retcode", models.IntegerField(blank=True, null=True)),
("stdout", models.TextField(blank=True, null=True)),
("stderr", models.TextField(blank=True, null=True)),
("execution_time", models.CharField(default="0.0000", max_length=100)),
("last_run", models.DateTimeField(blank=True, null=True)),
(
"status",
models.CharField(
choices=[
("passing", "Passing"),
("failing", "Failing"),
("pending", "Pending"),
],
default="pending",
max_length=30,
),
),
(
"sync_status",
models.CharField(
choices=[
("synced", "Synced With Agent"),
("notsynced", "Waiting On Agent Checkin"),
("pendingdeletion", "Pending Deletion on Agent"),
("initial", "Initial Task Sync"),
],
default="initial",
max_length=100,
),
),
(
"agent",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="taskresults",
to="agents.agent",
),
),
(
"task",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="taskresults",
to="autotasks.automatedtask",
),
),
],
options={
"unique_together": {("agent", "task")},
},
),
]
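
The unique_together on (agent, task) guarantees at most one result row per agent per task; a small sketch, assuming this TaskResult model, of the lookup-or-create pattern the model methods later rely on:

# Illustrative only: resolving the single TaskResult row for an (agent, task) pair.
from autotasks.models import AutomatedTask, TaskResult

def result_for(agent, task: AutomatedTask) -> TaskResult:
    # get_or_create leans on the ("agent", "task") unique constraint created above
    task_result, _created = TaskResult.objects.get_or_create(agent=agent, task=task)
    return task_result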

View File

@@ -0,0 +1,50 @@
# Generated by Django 3.2.12 on 2022-04-01 22:49
from django.db import migrations, transaction
from django.db.utils import IntegrityError
def migrate_task_results(apps, schema_editor):
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
TaskResult = apps.get_model("autotasks", "TaskResult")
for task in AutomatedTask.objects.exclude(agent=None):
try:
with transaction.atomic():
if task.managed_by_policy:
TaskResult.objects.create(
task_id=task.parent_task,
agent_id=task.agent_id,
retcode=task.retcode,
stdout=task.stdout,
stderr=task.stderr,
execution_time=task.execution_time,
last_run=task.last_run,
status=task.status,
sync_status=task.sync_status,
)
else:
TaskResult.objects.create(
task_id=task.id,
agent_id=task.agent.id,
retcode=task.retcode,
stdout=task.stdout,
stderr=task.stderr,
execution_time=task.execution_time,
last_run=task.last_run,
status=task.status,
sync_status=task.sync_status,
)
except IntegrityError:
continue
class Migration(migrations.Migration):
atomic = False
dependencies = [
("autotasks", "0030_auto_20220401_2244"),
]
operations = [
migrations.RunPython(migrate_task_results),
]
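
migrate_task_results is registered without a reverse function, so this migration cannot be unapplied; a minimal sketch, an assumption about intent rather than code from the diff, of pairing it with a no-op reverse:

# Illustrative only: the same RunPython registered with a no-op reverse so the
# data migration can be rolled back without raising IrreversibleError.
from django.db import migrations

def migrate_task_results(apps, schema_editor):
    ...  # body as in the migration above

class Migration(migrations.Migration):
    atomic = False
    dependencies = [("autotasks", "0030_auto_20220401_2244")]
    operations = [
        migrations.RunPython(migrate_task_results, migrations.RunPython.noop),
    ]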

View File

@@ -0,0 +1,45 @@
# Generated by Django 3.2.12 on 2022-04-01 23:01
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('autotasks', '0031_auto_20220401_2249'),
]
operations = [
migrations.RemoveField(
model_name='automatedtask',
name='execution_time',
),
migrations.RemoveField(
model_name='automatedtask',
name='last_run',
),
migrations.RemoveField(
model_name='automatedtask',
name='parent_task',
),
migrations.RemoveField(
model_name='automatedtask',
name='retcode',
),
migrations.RemoveField(
model_name='automatedtask',
name='status',
),
migrations.RemoveField(
model_name='automatedtask',
name='stderr',
),
migrations.RemoveField(
model_name='automatedtask',
name='stdout',
),
migrations.RemoveField(
model_name='automatedtask',
name='sync_status',
),
]

View File

@@ -0,0 +1,51 @@
# Generated by Django 3.2.12 on 2022-04-02 00:41
from datetime import datetime

from django.db import migrations
from django.utils.timezone import make_aware
def migrate_script_data(apps, schema_editor):
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
# convert autotask to the new format
for task in AutomatedTask.objects.all():
try:
edited = False
# convert scheduled task_type
if task.task_type == "scheduled":
task.task_type = "daily"
task.run_time_date = make_aware(datetime.strptime(task.run_time_minute, "%H:%M"))
task.daily_interval = 1
edited = True
# convert actions
if not task.actions:
if not task.script:
task.delete()
continue
task.actions = [
{
"type": "script",
"script": task.script.pk,
"script_args": task.script_args,
"timeout": task.timeout,
"name": task.script.name,
}
]
edited = True
if edited:
task.save()
except:
continue
class Migration(migrations.Migration):
dependencies = [
("autotasks", "0032_auto_20220401_2301"),
]
operations = [
migrations.RunPython(migrate_script_data),
]
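
The conversion above folds the deprecated script, script_args and timeout columns into a single entry of the new actions list; a hypothetical helper, not present in the codebase, showing the shape of that mapping:

# Hypothetical helper mirroring the conversion performed by migrate_script_data.
from typing import Any, Dict, List

def legacy_script_to_actions(
    script_pk: int, script_name: str, script_args: List[str], timeout: int
) -> List[Dict[str, Any]]:
    # one "script" action in the new list-based actions format
    return [
        {
            "type": "script",
            "script": script_pk,
            "script_args": script_args,
            "timeout": timeout,
            "name": script_name,
        }
    ]

# e.g. legacy_script_to_actions(1, "Clean Temp Files", [], 120)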

View File

@@ -0,0 +1,25 @@
# Generated by Django 3.2.12 on 2022-04-02 00:46
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('autotasks', '0033_auto_20220402_0041'),
]
operations = [
migrations.RemoveField(
model_name='automatedtask',
name='script',
),
migrations.RemoveField(
model_name='automatedtask',
name='script_args',
),
migrations.RemoveField(
model_name='automatedtask',
name='timeout',
),
]

View File

@@ -0,0 +1,36 @@
# Generated by Django 4.0.3 on 2022-04-15 18:18
from django.db import migrations
from django.db.models import Count
from autotasks.models import generate_task_name
def check_for_win_task_name_duplicates(apps, schema_editor):
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
TaskResult = apps.get_model("autotasks", "TaskResult")
duplicate_tasks = (
AutomatedTask.objects.values("win_task_name")
.annotate(records=Count("win_task_name"))
.filter(records__gt=1)
)
for task in duplicate_tasks:
dups = list(AutomatedTask.objects.filter(win_task_name=task["win_task_name"]))
for x in range(task["records"] - 1):
dups[x].win_task_name = generate_task_name()
dups[x].save(update_fields=["win_task_name"])
# update task_result sync status
TaskResult.objects.filter(task=dups[x]).update(sync_status="notsynced")
class Migration(migrations.Migration):
dependencies = [
("autotasks", "0034_auto_20220402_0046"),
]
operations = [
migrations.RunPython(check_for_win_task_name_duplicates),
]
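
Duplicates have to be renamed here because the next migration adds unique=True to win_task_name; a generic sketch of the grouping query used above (the helper is illustrative, not from this project):

# Illustrative only: the generic "find duplicated column values" query that the
# data migration above builds on.
from django.db.models import Count

def duplicated_values(model, column: str):
    return (
        model.objects.values(column)
        .annotate(records=Count(column))
        .filter(records__gt=1)
    )

# duplicated_values(AutomatedTask, "win_task_name") -> groups that must be
# renamed before win_task_name can become unique.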

View File

@@ -0,0 +1,20 @@
# Generated by Django 4.0.3 on 2022-04-15 20:52
from django.db import migrations, models
import autotasks.models
class Migration(migrations.Migration):
dependencies = [
('autotasks', '0035_auto_20220415_1818'),
]
operations = [
migrations.AlterField(
model_name='automatedtask',
name='win_task_name',
field=models.CharField(blank=True, default=autotasks.models.generate_task_name, max_length=255, unique=True),
),
]

View File

@@ -1,20 +1,31 @@
import asyncio
import datetime as dt
import random
import string
from typing import List
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
import pytz
from alerts.models import SEVERITY_CHOICES
from django.contrib.postgres.fields import ArrayField
from django.core.cache import cache
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.db.models.fields import DateTimeField
from django.db.models.fields.json import JSONField
from django.db.utils import DatabaseError
from django.utils import timezone as djangotime
from alerts.models import SEVERITY_CHOICES
from core.utils import get_core_settings
from logs.models import BaseAuditModel, DebugLog
from packaging import version as pyver
from tacticalrmm.constants import (
FIELDS_TRIGGER_TASK_UPDATE_AGENT,
POLICY_TASK_FIELDS_TO_COPY,
DebugLogType,
)
if TYPE_CHECKING:
from automation.models import Policy
from alerts.models import Alert, AlertTemplate
from agents.models import Agent
from checks.models import Check
from tacticalrmm.models import PermissionQuerySet
from tacticalrmm.utils import (
@@ -50,6 +61,11 @@ TASK_STATUS_CHOICES = [
]
def generate_task_name() -> str:
chars = string.ascii_letters
return "TacticalRMM_" + "".join(random.choice(chars) for i in range(35))
class AutomatedTask(BaseAuditModel):
objects = PermissionQuerySet.as_manager()
@@ -75,51 +91,19 @@ class AutomatedTask(BaseAuditModel):
on_delete=models.SET_NULL,
)
# deprecated
script = models.ForeignKey(
"scripts.Script",
null=True,
blank=True,
related_name="autoscript",
on_delete=models.SET_NULL,
)
# deprecated
script_args = ArrayField(
models.CharField(max_length=255, null=True, blank=True),
null=True,
blank=True,
default=list,
)
# deprecated
timeout = models.PositiveIntegerField(blank=True, default=120)
# format -> {"actions": [{"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": []}, {"type": "cmd", "command": "whoami", "timeout": 90}]}
# format -> [{"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": []}, {"type": "cmd", "command": "whoami", "timeout": 90}]
actions = JSONField(default=list)
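# Illustrative example (not part of this diff) of a value matching the list
# format documented above; the "shell" key on cmd actions is what
# TaskGOGetSerializer reads later when building the agent payload:
#   [
#       {"type": "script", "script": 1, "name": "Script Name", "timeout": 90, "script_args": []},
#       {"type": "cmd", "command": "whoami", "shell": "cmd", "timeout": 90},
#   ]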
assigned_check = models.ForeignKey(
"checks.Check",
null=True,
blank=True,
related_name="assignedtask",
related_name="assignedtasks",
on_delete=models.SET_NULL,
)
name = models.CharField(max_length=255)
collector_all_output = models.BooleanField(default=False)
managed_by_policy = models.BooleanField(default=False)
parent_task = models.PositiveIntegerField(null=True, blank=True)
retvalue = models.TextField(null=True, blank=True)
retcode = models.IntegerField(null=True, blank=True)
stdout = models.TextField(null=True, blank=True)
stderr = models.TextField(null=True, blank=True)
execution_time = models.CharField(max_length=100, default="0.0000")
last_run = models.DateTimeField(null=True, blank=True)
enabled = models.BooleanField(default=True)
continue_on_error = models.BooleanField(default=True)
status = models.CharField(
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
)
sync_status = models.CharField(
max_length=100, choices=SYNC_STATUS_CHOICES, default="initial"
)
alert_severity = models.CharField(
max_length=30, choices=SEVERITY_CHOICES, default="info"
)
@@ -132,7 +116,9 @@ class AutomatedTask(BaseAuditModel):
task_type = models.CharField(
max_length=100, choices=TASK_TYPE_CHOICES, default="manual"
)
win_task_name = models.CharField(max_length=255, null=True, blank=True)
win_task_name = models.CharField(
max_length=255, unique=True, blank=True, default=generate_task_name
)
run_time_date = DateTimeField(null=True, blank=True)
expire_date = DateTimeField(null=True, blank=True)
@@ -166,43 +152,53 @@ class AutomatedTask(BaseAuditModel):
run_asap_after_missed = models.BooleanField(default=False) # added in agent v1.4.7
task_instance_policy = models.PositiveSmallIntegerField(blank=True, default=1)
def __str__(self):
# deprecated
managed_by_policy = models.BooleanField(default=False)
# non-database property
task_result: "Union[TaskResult, Dict[None, None]]" = {}
def __str__(self) -> str:
return self.name
def save(self, *args, **kwargs):
from automation.tasks import update_policy_autotasks_fields_task
from autotasks.tasks import modify_win_task
def save(self, *args, **kwargs) -> None:
# get old agent if exists
# if task is a policy task clear cache on everything
if self.policy:
cache.delete_many_pattern("site_*_tasks")
cache.delete_many_pattern("agent_*_tasks")
# get old task if exists
old_task = AutomatedTask.objects.get(pk=self.pk) if self.pk else None
super(AutomatedTask, self).save(old_model=old_task, *args, **kwargs)
# check if fields were updated that require a sync to the agent
update_agent = False
# check if fields were updated that require a sync to the agent and set status to notsynced
if old_task:
for field in self.fields_that_trigger_task_update_on_agent:
if getattr(self, field) != getattr(old_task, field):
update_agent = True
break
if self.policy:
TaskResult.objects.exclude(sync_status="initial").filter(
task__policy_id=self.policy.id
).update(sync_status="notsynced")
else:
TaskResult.objects.filter(agent=self.agent, task=self).update(
sync_status="notsynced"
)
# check if automated task was enabled/disabled and send celery task
if old_task and old_task.agent and update_agent:
modify_win_task.delay(pk=self.pk)
def delete(self, *args, **kwargs):
# if a policy task was edited, check whether the changed field is one that should be copied to the rest of the agent tasks
elif old_task and old_task.policy:
if update_agent:
update_policy_autotasks_fields_task.delay(
task=self.pk, update_agent=update_agent
)
else:
for field in self.policy_fields_to_copy:
if getattr(self, field) != getattr(old_task, field):
update_policy_autotasks_fields_task.delay(task=self.pk)
break
# if task is a policy task clear cache on everything
if self.policy:
cache.delete_many_pattern("site_*_tasks")
cache.delete_many_pattern("agent_*_tasks")
super(AutomatedTask, self).delete(
*args,
**kwargs,
)
@property
def schedule(self):
def schedule(self) -> Optional[str]:
if self.task_type == "manual":
return "Manual"
elif self.task_type == "checkfailure":
@@ -234,76 +230,9 @@ class AutomatedTask(BaseAuditModel):
days = bitdays_to_string(self.run_time_bit_weekdays)
return f"Runs on {months} on {weeks} on {days} at {run_time_nice}"
@property
def last_run_as_timezone(self):
if self.last_run is not None and self.agent is not None:
return self.last_run.astimezone(
pytz.timezone(self.agent.timezone)
).strftime("%b-%d-%Y - %H:%M")
return self.last_run
# These fields will be duplicated on the agent tasks that are managed by a policy
@property
def policy_fields_to_copy(self) -> List[str]:
return [
"alert_severity",
"email_alert",
"text_alert",
"dashboard_alert",
"assigned_check",
"name",
"actions",
"run_time_bit_weekdays",
"run_time_date",
"expire_date",
"daily_interval",
"weekly_interval",
"task_type",
"win_task_name",
"enabled",
"remove_if_not_scheduled",
"run_asap_after_missed",
"custom_field",
"collector_all_output",
"monthly_days_of_month",
"monthly_months_of_year",
"monthly_weeks_of_month",
"task_repetition_duration",
"task_repetition_interval",
"stop_task_at_duration_end",
"random_task_delay",
"run_asap_after_missed",
"task_instance_policy",
"continue_on_error",
]
@property
def fields_that_trigger_task_update_on_agent(self) -> List[str]:
return [
"run_time_bit_weekdays",
"run_time_date",
"expire_date",
"daily_interval",
"weekly_interval",
"enabled",
"remove_if_not_scheduled",
"run_asap_after_missed",
"monthly_days_of_month",
"monthly_months_of_year",
"monthly_weeks_of_month",
"task_repetition_duration",
"task_repetition_interval",
"stop_task_at_duration_end",
"random_task_delay",
"run_asap_after_missed",
"task_instance_policy",
]
@staticmethod
def generate_task_name():
chars = string.ascii_letters
return "TacticalRMM_" + "".join(random.choice(chars) for i in range(35))
return FIELDS_TRIGGER_TASK_UPDATE_AGENT
@staticmethod
def serialize(task):
@@ -312,46 +241,26 @@ class AutomatedTask(BaseAuditModel):
return TaskAuditSerializer(task).data
def create_policy_task(self, agent=None, policy=None, assigned_check=None):
# added to allow new policy tasks to be assigned to check only when the agent check exists already
if (
self.assigned_check
and agent
and agent.agentchecks.filter(parent_check=self.assigned_check.id).exists()
):
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.id)
# if policy is present, then this task is being copied to another policy
# if agent is present, then this task is being created on an agent from a policy
# exit if neither are set or if both are set
# also exit if assigned_check is set because this task will be created when the check is created
if (
(not agent and not policy)
or (agent and policy)
or (self.assigned_check and not assigned_check)
):
return
def create_policy_task(
self, policy: "Policy", assigned_check: "Optional[Check]" = None
) -> None:
### Copies certain properties on this task (self) to a new task and sets it to the supplied Policy
fields_to_copy = POLICY_TASK_FIELDS_TO_COPY
task = AutomatedTask.objects.create(
agent=agent,
policy=policy,
managed_by_policy=bool(agent),
parent_task=(self.pk if agent else None),
assigned_check=assigned_check,
)
for field in self.policy_fields_to_copy:
if field != "assigned_check":
setattr(task, field, getattr(self, field))
for field in fields_to_copy:
setattr(task, field, getattr(self, field))
task.save()
if agent:
task.create_task_on_agent()
# agent version >= 1.8.0
def generate_nats_task_payload(self, editing=False):
def generate_nats_task_payload(
self, agent: "Optional[Agent]" = None, editing: bool = False
) -> Dict[str, Any]:
task = {
"pk": self.pk,
"type": "rmm",
@@ -371,6 +280,18 @@ class AutomatedTask(BaseAuditModel):
}
if self.task_type in ["runonce", "daily", "weekly", "monthly", "monthlydow"]:
# set runonce task in future if creating and run_asap_after_missed is set
if (
not editing
and self.task_type == "runonce"
and self.run_asap_after_missed
and agent
and self.run_time_date
< djangotime.now().astimezone(pytz.timezone(agent.timezone))
):
self.run_time_date = (
djangotime.now() + djangotime.timedelta(minutes=5)
).astimezone(pytz.timezone(agent.timezone))
task["start_year"] = int(self.run_time_date.strftime("%Y"))
task["start_month"] = int(self.run_time_date.strftime("%-m"))
@@ -423,215 +344,145 @@ class AutomatedTask(BaseAuditModel):
return task
def create_task_on_agent(self):
from agents.models import Agent
agent = (
Agent.objects.filter(pk=self.agent.pk)
.only("pk", "version", "hostname", "agent_id")
.get()
)
if pyver.parse(agent.version) >= pyver.parse("1.8.0"):
nats_data = {
"func": "schedtask",
"schedtaskpayload": self.generate_nats_task_payload(),
}
def create_task_on_agent(self, agent: "Optional[Agent]" = None) -> str:
if self.policy and not agent:
return "agent parameter needs to be passed with policy task"
else:
agent = agent if self.policy else self.agent
if self.task_type == "scheduled":
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "weekly",
"weekdays": self.run_time_bit_weekdays,
"pk": self.pk,
"name": self.win_task_name,
"hour": dt.datetime.strptime(
self.run_time_minute, "%H:%M"
).hour,
"min": dt.datetime.strptime(
self.run_time_minute, "%H:%M"
).minute,
},
}
try:
task_result = TaskResult.objects.get(agent=agent, task=self)
except TaskResult.DoesNotExist:
task_result = TaskResult(agent=agent, task=self)
task_result.save()
elif self.task_type == "runonce":
# check if scheduled time is in the past
agent_tz = pytz.timezone(agent.timezone)
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
pytz.utc
)
now = djangotime.now()
if task_time_utc < now:
self.run_time_date = now.astimezone(agent_tz).replace(
tzinfo=pytz.utc
) + djangotime.timedelta(minutes=5)
self.save(update_fields=["run_time_date"])
nats_data = {
"func": "schedtask",
"schedtaskpayload": self.generate_nats_task_payload(agent),
}
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "once",
"pk": self.pk,
"name": self.win_task_name,
"year": int(dt.datetime.strftime(self.run_time_date, "%Y")),
"month": dt.datetime.strftime(self.run_time_date, "%B"),
"day": int(dt.datetime.strftime(self.run_time_date, "%d")),
"hour": int(dt.datetime.strftime(self.run_time_date, "%H")),
"min": int(dt.datetime.strftime(self.run_time_date, "%M")),
},
}
if self.run_asap_after_missed:
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
if self.remove_if_not_scheduled:
nats_data["schedtaskpayload"]["deleteafter"] = True
elif self.task_type == "checkfailure" or self.task_type == "manual":
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": self.pk,
"name": self.win_task_name,
},
}
else:
return "error"
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5))
if r != "ok":
self.sync_status = "initial"
self.save(update_fields=["sync_status"])
task_result.sync_status = "initial"
task_result.save(update_fields=["sync_status"])
DebugLog.warning(
agent=agent,
log_type="agent_issues",
message=f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in.",
log_type=DebugLogType.AGENT_ISSUES,
message=f"Unable to create scheduled task {self.name} on {task_result.agent.hostname}. It will be created when the agent checks in.",
)
return "timeout"
else:
self.sync_status = "synced"
self.save(update_fields=["sync_status"])
task_result.sync_status = "synced"
task_result.save(update_fields=["sync_status"])
DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname} task {self.name} was successfully created",
log_type=DebugLogType.AGENT_ISSUES,
message=f"{task_result.agent.hostname} task {self.name} was successfully created",
)
return "ok"
def modify_task_on_agent(self):
from agents.models import Agent
agent = (
Agent.objects.filter(pk=self.agent.pk)
.only("pk", "version", "hostname", "agent_id")
.get()
)
if pyver.parse(agent.version) >= pyver.parse("1.8.0"):
nats_data = {
"func": "schedtask",
"schedtaskpayload": self.generate_nats_task_payload(editing=True),
}
def modify_task_on_agent(self, agent: "Optional[Agent]" = None) -> str:
if self.policy and not agent:
return "agent parameter needs to be passed with policy task"
else:
nats_data = {
"func": "enableschedtask",
"schedtaskpayload": {
"name": self.win_task_name,
"enabled": self.enabled,
},
}
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
agent = agent if self.policy else self.agent
try:
task_result = TaskResult.objects.get(agent=agent, task=self)
except TaskResult.DoesNotExist:
task_result = TaskResult(agent=agent, task=self)
task_result.save()
nats_data = {
"func": "schedtask",
"schedtaskpayload": self.generate_nats_task_payload(editing=True),
}
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=5))
if r != "ok":
self.sync_status = "notsynced"
self.save(update_fields=["sync_status"])
task_result.sync_status = "notsynced"
task_result.save(update_fields=["sync_status"])
DebugLog.warning(
agent=agent,
log_type="agent_issues",
message=f"Unable to modify scheduled task {self.name} on {agent.hostname}({agent.pk}). It will try again on next agent checkin",
log_type=DebugLogType.AGENT_ISSUES,
message=f"Unable to modify scheduled task {self.name} on {task_result.agent.hostname}({task_result.agent.agent_id}). It will try again on next agent checkin",
)
return "timeout"
else:
self.sync_status = "synced"
self.save(update_fields=["sync_status"])
task_result.sync_status = "synced"
task_result.save(update_fields=["sync_status"])
DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname} task {self.name} was successfully modified",
log_type=DebugLogType.AGENT_ISSUES,
message=f"{task_result.agent.hostname} task {self.name} was successfully modified",
)
return "ok"
def delete_task_on_agent(self):
from agents.models import Agent
def delete_task_on_agent(self, agent: "Optional[Agent]" = None) -> str:
if self.policy and not agent:
return "agent parameter needs to be passed with policy task"
else:
agent = agent if self.policy else self.agent
agent = (
Agent.objects.filter(pk=self.agent.pk)
.only("pk", "version", "hostname", "agent_id")
.get()
)
try:
task_result = TaskResult.objects.get(agent=agent, task=self)
except TaskResult.DoesNotExist:
task_result = TaskResult(agent=agent, task=self)
task_result.save()
nats_data = {
"func": "delschedtask",
"schedtaskpayload": {"name": self.win_task_name},
}
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
r = asyncio.run(task_result.agent.nats_cmd(nats_data, timeout=10))
if r != "ok" and "The system cannot find the file specified" not in r:
self.sync_status = "pendingdeletion"
task_result.sync_status = "pendingdeletion"
try:
self.save(update_fields=["sync_status"])
task_result.save(update_fields=["sync_status"])
except DatabaseError:
pass
DebugLog.warning(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname} task {self.name} will be deleted on next checkin",
log_type=DebugLogType.AGENT_ISSUES,
message=f"{task_result.agent.hostname} task {self.name} will be deleted on next checkin",
)
return "timeout"
else:
self.delete()
DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname}({agent.pk}) task {self.name} was deleted",
log_type=DebugLogType.AGENT_ISSUES,
message=f"{task_result.agent.hostname}({task_result.agent.agent_id}) task {self.name} was deleted",
)
return "ok"
def run_win_task(self):
from agents.models import Agent
def run_win_task(self, agent: "Optional[Agent]" = None) -> str:
if self.policy and not agent:
return "agent parameter needs to be passed with policy task"
else:
agent = agent if self.policy else self.agent
agent = (
Agent.objects.filter(pk=self.agent.pk)
.only("pk", "version", "hostname", "agent_id")
.get()
try:
task_result = TaskResult.objects.get(agent=agent, task=self)
except TaskResult.DoesNotExist:
task_result = TaskResult(agent=agent, task=self)
task_result.save()
asyncio.run(
task_result.agent.nats_cmd(
{"func": "runtask", "taskpk": self.pk}, wait=False
)
)
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
return "ok"
def save_collector_results(self):
agent_field = self.custom_field.get_or_create_field_value(self.agent)
value = (
self.stdout.strip()
if self.collector_all_output
else self.stdout.strip().split("\n")[-1].strip()
)
agent_field.save_to_field(value)
def should_create_alert(self, alert_template=None):
return (
self.dashboard_alert
@@ -647,10 +498,64 @@ class AutomatedTask(BaseAuditModel):
)
)
def send_email(self):
from core.models import CoreSettings
CORE = CoreSettings.objects.first()
class TaskResult(models.Model):
class Meta:
unique_together = (("agent", "task"),)
objects = PermissionQuerySet.as_manager()
agent = models.ForeignKey(
"agents.Agent",
related_name="taskresults",
on_delete=models.CASCADE,
)
task = models.ForeignKey(
"autotasks.AutomatedTask",
related_name="taskresults",
on_delete=models.CASCADE,
)
retcode = models.IntegerField(null=True, blank=True)
stdout = models.TextField(null=True, blank=True)
stderr = models.TextField(null=True, blank=True)
execution_time = models.CharField(max_length=100, default="0.0000")
last_run = models.DateTimeField(null=True, blank=True)
status = models.CharField(
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
)
sync_status = models.CharField(
max_length=100, choices=SYNC_STATUS_CHOICES, default="initial"
)
def __str__(self):
return f"{self.agent.hostname} - {self.task}"
def get_or_create_alert_if_needed(
self, alert_template: "Optional[AlertTemplate]"
) -> "Optional[Alert]":
from alerts.models import Alert
return Alert.create_or_return_task_alert(
self.task,
agent=self.agent,
skip_create=not self.task.should_create_alert(alert_template),
)
def save_collector_results(self) -> None:
agent_field = self.task.custom_field.get_or_create_field_value(self.agent)
value = (
self.stdout.strip()
if self.task.collector_all_output
else self.stdout.strip().split("\n")[-1].strip()
)
agent_field.save_to_field(value)
def send_email(self):
CORE = get_core_settings()
# Format of Email sent when Task has email alert
if self.agent:
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
@@ -662,12 +567,11 @@ class AutomatedTask(BaseAuditModel):
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
)
CORE.send_mail(subject, body, self.agent.alert_template) # type: ignore
CORE.send_mail(subject, body, self.agent.alert_template)
def send_sms(self):
from core.models import CoreSettings
CORE = get_core_settings()
CORE = CoreSettings.objects.first()
# Format of SMS sent when Task has SMS alert
if self.agent:
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - {self} Failed"
@@ -679,27 +583,24 @@ class AutomatedTask(BaseAuditModel):
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
)
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
CORE.send_sms(body, alert_template=self.agent.alert_template)
def send_resolved_email(self):
from core.models import CoreSettings
CORE = get_core_settings()
CORE = CoreSettings.objects.first()
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
body = (
subject
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
)
CORE.send_mail(subject, body, alert_template=self.agent.alert_template) # type: ignore
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
def send_resolved_sms(self):
from core.models import CoreSettings
CORE = CoreSettings.objects.first()
CORE = get_core_settings()
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
body = (
subject
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
)
CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
CORE.send_sms(body, alert_template=self.agent.alert_template)
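
The create/modify/delete/run helpers above now take an optional agent and refuse to act on a policy task without one; a minimal wrapper sketch, assuming only the model API shown in this file:

# Illustrative only: the calling convention for the per-agent methods above.
from typing import Optional

from agents.models import Agent
from autotasks.models import AutomatedTask

def push_task_to_agent(task: AutomatedTask, agent: Optional[Agent] = None) -> str:
    # policy tasks must be given the target agent explicitly; agent-owned tasks
    # resolve self.agent inside create_task_on_agent()
    if task.policy and agent is None:
        return "agent parameter needs to be passed with policy task"
    return task.create_task_on_agent(agent) if task.policy else task.create_task_on_agent()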

View File

@@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
class AutoTaskPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
if "agent_id" in view.kwargs.keys():
return _has_perm(r, "can_list_autotasks") and _has_perm_on_agent(
@@ -17,5 +17,5 @@ class AutoTaskPerms(permissions.BasePermission):
class RunAutoTaskPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_run_autotasks")

View File

@@ -1,20 +1,32 @@
from rest_framework import serializers
from scripts.models import Script
from django.core.exceptions import ObjectDoesNotExist
from .models import AutomatedTask
from scripts.models import Script
from .models import AutomatedTask, TaskResult
class TaskResultSerializer(serializers.ModelSerializer):
class Meta:
model = TaskResult
fields = "__all__"
read_only_fields = ("agent", "task")
class TaskSerializer(serializers.ModelSerializer):
check_name = serializers.ReadOnlyField(source="assigned_check.readable_desc")
schedule = serializers.ReadOnlyField()
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
alert_template = serializers.SerializerMethodField()
run_time_date = serializers.DateTimeField(format="iso-8601", required=False)
expire_date = serializers.DateTimeField(
format="iso-8601", allow_null=True, required=False
)
run_time_date = serializers.DateTimeField(required=False)
expire_date = serializers.DateTimeField(allow_null=True, required=False)
task_result = serializers.SerializerMethodField()
def get_task_result(self, obj):
return (
TaskResultSerializer(obj.task_result).data
if isinstance(obj.task_result, TaskResult)
else {}
)
def validate_actions(self, value):
@@ -187,13 +199,14 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
def get_task_actions(self, obj):
tmp = []
actions_to_remove = []
agent = self.context["agent"]
for action in obj.actions:
if action["type"] == "cmd":
tmp.append(
{
"type": "cmd",
"command": Script.parse_script_args(
agent=obj.agent,
agent=agent,
shell=action["shell"],
args=[action["command"]],
)[0],
@@ -204,7 +217,7 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
elif action["type"] == "script":
try:
script = Script.objects.get(pk=action["script"])
except ObjectDoesNotExist:
except Script.DoesNotExist:
# script doesn't exist so remove it
actions_to_remove.append(action["script"])
continue
@@ -214,7 +227,7 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
"script_name": script.name,
"code": script.code,
"script_args": Script.parse_script_args(
agent=obj.agent,
agent=agent,
shell=script.shell,
args=action["script_args"],
),
@@ -241,12 +254,6 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
fields = ["id", "continue_on_error", "enabled", "task_actions"]
class TaskRunnerPatchSerializer(serializers.ModelSerializer):
class Meta:
model = AutomatedTask
fields = "__all__"
class TaskAuditSerializer(serializers.ModelSerializer):
class Meta:
model = AutomatedTask
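
get_task_result only serializes obj.task_result when it is a real TaskResult, so a caller has to attach the per-agent result to the transient task_result attribute before serializing; a hedged sketch of that wiring (the helper is an assumption, not code from this diff):

# Illustrative only: filling the transient task_result attribute that
# TaskSerializer.get_task_result reads.
from autotasks.models import AutomatedTask, TaskResult
from autotasks.serializers import TaskSerializer

def serialize_task_for_agent(task: AutomatedTask, agent) -> dict:
    try:
        task.task_result = TaskResult.objects.get(agent=agent, task=task)
    except TaskResult.DoesNotExist:
        task.task_result = {}  # serializer falls back to an empty dict
    return TaskSerializer(task).data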

View File

@@ -2,121 +2,145 @@ import asyncio
import datetime as dt
import random
from time import sleep
from typing import Union
from typing import Optional, Union
from autotasks.models import AutomatedTask
from django.utils import timezone as djangotime
from agents.models import Agent
from alerts.models import Alert
from autotasks.models import AutomatedTask, TaskResult
from logs.models import DebugLog
from tacticalrmm.celery import app
from tacticalrmm.constants import DebugLogType
@app.task
def create_win_task_schedule(pk):
task = AutomatedTask.objects.get(pk=pk)
def create_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str:
try:
task = AutomatedTask.objects.get(pk=pk)
task.create_task_on_agent()
if agent_id:
task.create_task_on_agent(Agent.objects.get(agent_id=agent_id))
else:
task.create_task_on_agent()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
return "ok"
@app.task
def modify_win_task(pk):
task = AutomatedTask.objects.get(pk=pk)
def modify_win_task(pk: int, agent_id: Optional[str] = None) -> str:
try:
task = AutomatedTask.objects.get(pk=pk)
task.modify_task_on_agent()
if agent_id:
task.modify_task_on_agent(Agent.objects.get(agent_id=agent_id))
else:
task.modify_task_on_agent()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
return "ok"
@app.task
def delete_win_task_schedule(pk):
task = AutomatedTask.objects.get(pk=pk)
def delete_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str:
try:
task = AutomatedTask.objects.get(pk=pk)
if agent_id:
task.delete_task_on_agent(Agent.objects.get(agent_id=agent_id))
else:
task.delete_task_on_agent()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
task.delete_task_on_agent()
return "ok"
@app.task
def run_win_task(pk):
task = AutomatedTask.objects.get(pk=pk)
task.run_win_task()
def run_win_task(pk: int, agent_id: Optional[str] = None) -> str:
try:
task = AutomatedTask.objects.get(pk=pk)
if agent_id:
task.run_win_task(Agent.objects.get(agent_id=agent_id))
else:
task.run_win_task()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
return "ok"
@app.task
def remove_orphaned_win_tasks(agentpk):
def remove_orphaned_win_tasks() -> None:
from agents.models import Agent
agent = Agent.objects.get(pk=agentpk)
for agent in Agent.online_agents():
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"Orphaned task cleanup initiated on {agent.hostname}.",
)
if not isinstance(r, list): # agent did not return a task list
DebugLog.error(
agent=agent,
log_type=DebugLogType.AGENT_ISSUES,
message=f"Unable to pull list of scheduled tasks on {agent.hostname}: {r}",
)
return
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
agent_task_names = [
task.win_task_name for task in agent.get_tasks_with_policies()
]
if not isinstance(r, list) and not r: # empty list
DebugLog.error(
agent=agent,
log_type="agent_issues",
message=f"Unable to clean up scheduled tasks on {agent.hostname}: {r}",
exclude_tasks = (
"TacticalRMM_fixmesh",
"TacticalRMM_SchedReboot",
"TacticalRMM_sync",
"TacticalRMM_agentupdate",
)
return "notlist"
agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True))
for task in r:
if task.startswith(exclude_tasks):
# skip system tasks or any pending reboots
continue
exclude_tasks = (
"TacticalRMM_fixmesh",
"TacticalRMM_SchedReboot",
"TacticalRMM_sync",
"TacticalRMM_agentupdate",
)
for task in r:
if task.startswith(exclude_tasks):
# skip system tasks or any pending reboots
continue
if task.startswith("TacticalRMM_") and task not in agent_task_names:
# delete task since it doesn't exist in UI
nats_data = {
"func": "delschedtask",
"schedtaskpayload": {"name": task},
}
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if ret != "ok":
DebugLog.error(
agent=agent,
log_type="agent_issues",
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
)
else:
DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"Removed orphaned task {task} from {agent.hostname}",
)
DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"Orphaned task cleanup finished on {agent.hostname}",
)
if task.startswith("TacticalRMM_") and task not in agent_task_names:
# delete task since it doesn't exist in UI
nats_data = {
"func": "delschedtask",
"schedtaskpayload": {"name": task},
}
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if ret != "ok":
DebugLog.error(
agent=agent,
log_type=DebugLogType.AGENT_ISSUES,
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
)
else:
DebugLog.info(
agent=agent,
log_type=DebugLogType.AGENT_ISSUES,
message=f"Removed orphaned task {task} from {agent.hostname}",
)
@app.task
def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending email
if not alert.email_sent:
sleep(random.randint(1, 10))
alert.assigned_task.send_email()
task_result = TaskResult.objects.get(
task=alert.assigned_task, agent=alert.agent
)
sleep(random.randint(1, 5))
task_result.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
else:
@@ -124,8 +148,11 @@ def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None)
# send an email only if the last email sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.email_sent < delta:
sleep(random.randint(1, 10))
alert.assigned_task.send_email()
task_result = TaskResult.objects.get(
task=alert.assigned_task, agent=alert.agent
)
sleep(random.randint(1, 5))
task_result.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
@@ -134,14 +161,19 @@ def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None)
@app.task
def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending text
if not alert.sms_sent:
task_result = TaskResult.objects.get(
task=alert.assigned_task, agent=alert.agent
)
sleep(random.randint(1, 3))
alert.assigned_task.send_sms()
task_result.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
else:
@@ -149,8 +181,11 @@ def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) ->
# send a text only if the last text sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.sms_sent < delta:
task_result = TaskResult.objects.get(
task=alert.assigned_task, agent=alert.agent
)
sleep(random.randint(1, 3))
alert.assigned_task.send_sms()
task_result.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
@@ -159,14 +194,19 @@ def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) ->
@app.task
def handle_resolved_task_sms_alert(pk: int) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending text
if not alert.resolved_sms_sent:
task_result = TaskResult.objects.get(
task=alert.assigned_task, agent=alert.agent
)
sleep(random.randint(1, 3))
alert.assigned_task.send_resolved_sms()
task_result.send_resolved_sms()
alert.resolved_sms_sent = djangotime.now()
alert.save(update_fields=["resolved_sms_sent"])
@@ -175,14 +215,19 @@ def handle_resolved_task_sms_alert(pk: int) -> str:
@app.task
def handle_resolved_task_email_alert(pk: int) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending email
if not alert.resolved_email_sent:
sleep(random.randint(1, 10))
alert.assigned_task.send_resolved_email()
task_result = TaskResult.objects.get(
task=alert.assigned_task, agent=alert.agent
)
sleep(random.randint(1, 5))
task_result.send_resolved_email()
alert.resolved_email_sent = djangotime.now()
alert.save(update_fields=["resolved_email_sent"])

View File

@@ -1,4 +1,3 @@
import datetime as dt
from unittest.mock import call, patch
from django.utils import timezone as djangotime
@@ -6,7 +5,7 @@ from model_bakery import baker
from tacticalrmm.test import TacticalTestCase
from .models import AutomatedTask
from .models import AutomatedTask, TaskResult
from .serializers import TaskSerializer
from .tasks import create_win_task_schedule, remove_orphaned_win_tasks, run_win_task
@@ -44,11 +43,8 @@ class TestAutotaskViews(TacticalTestCase):
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 4)
@patch("automation.tasks.generate_agent_autotasks_task.delay")
@patch("autotasks.tasks.create_win_task_schedule.delay")
def test_add_autotask(
self, create_win_task_schedule, generate_agent_autotasks_task
):
def test_add_autotask(self, create_win_task_schedule):
url = f"{base_url}/"
# setup data
@@ -238,20 +234,6 @@ class TestAutotaskViews(TacticalTestCase):
create_win_task_schedule.assert_called()
create_win_task_schedule.reset_mock()
# test add task to policy
data = {
"policy": policy.id, # type: ignore
"name": "Test Task Manual",
"enabled": True,
"task_type": "manual",
"actions": actions,
}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
generate_agent_autotasks_task.assert_called_with(policy=policy.id) # type: ignore
self.check_not_authenticated("post", url)
def test_get_autotask(self):
@@ -266,15 +248,11 @@ class TestAutotaskViews(TacticalTestCase):
serializer = TaskSerializer(task)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, serializer.data) # type: ignore
self.assertEqual(resp.data, serializer.data)
self.check_not_authenticated("get", url)
@patch("autotasks.tasks.modify_win_task.delay")
@patch("automation.tasks.update_policy_autotasks_fields_task.delay")
def test_update_autotask(
self, update_policy_autotasks_fields_task, modify_win_task
):
def test_update_autotask(self):
# setup data
agent = baker.make_recipe("agents.agent")
agent_task = baker.make("autotasks.AutomatedTask", agent=agent)
@@ -292,22 +270,19 @@ class TestAutotaskViews(TacticalTestCase):
resp = self.client.put(f"{base_url}/500/", format="json")
self.assertEqual(resp.status_code, 404)
url = f"{base_url}/{agent_task.id}/" # type: ignore
url = f"{base_url}/{agent_task.id}/"
# test editing agent task with no task update
data = {"name": "New Name"}
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
modify_win_task.not_called() # type: ignore
# test editing agent task with agent task update
data = {"enabled": False}
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
modify_win_task.assert_called_with(pk=agent_task.id) # type: ignore
modify_win_task.reset_mock()
# test editing agent task with task_type
data = {
@@ -323,13 +298,11 @@ class TestAutotaskViews(TacticalTestCase):
"repetition_duration": "1H",
"random_task_delay": "5M",
"custom_field": custom_field.id,
"run_asap_afteR_missed": False,
"run_asap_after_missed": False,
}
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
modify_win_task.assert_called_with(pk=agent_task.id) # type: ignore
modify_win_task.reset_mock()
# test trying to edit with empty actions
data = {
@@ -349,35 +322,12 @@ class TestAutotaskViews(TacticalTestCase):
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 400)
modify_win_task.assert_not_called # type: ignore
# test editing policy tasks
url = f"{base_url}/{policy_task.id}/" # type: ignore
# test editing policy task
data = {"enabled": False}
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
update_policy_autotasks_fields_task.assert_called_with(
task=policy_task.id, update_agent=True # type: ignore
)
update_policy_autotasks_fields_task.reset_mock()
# test editing policy task with no agent update
data = {"name": "New Name"}
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
update_policy_autotasks_fields_task.assert_called_with(task=policy_task.id)
self.check_not_authenticated("put", url)
@patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
@patch("autotasks.tasks.delete_win_task_schedule.delay")
@patch("automation.tasks.delete_policy_autotasks_task.delay")
def test_delete_autotask(
self, delete_policy_autotasks_task, delete_win_task_schedule
):
def test_delete_autotask(self, delete_win_task_schedule, remove_orphaned_win_tasks):
# setup data
agent = baker.make_recipe("agents.agent")
agent_task = baker.make("autotasks.AutomatedTask", agent=agent)
@@ -389,17 +339,20 @@ class TestAutotaskViews(TacticalTestCase):
self.assertEqual(resp.status_code, 404)
# test delete agent task
url = f"{base_url}/{agent_task.id}/" # type: ignore
url = f"{base_url}/{agent_task.id}/"
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
delete_win_task_schedule.assert_called_with(pk=agent_task.id) # type: ignore
delete_win_task_schedule.assert_called_with(pk=agent_task.id)
remove_orphaned_win_tasks.assert_not_called()
delete_win_task_schedule.reset_mock()
remove_orphaned_win_tasks.reset_mock()
# test delete policy task
url = f"{base_url}/{policy_task.id}/" # type: ignore
url = f"{base_url}/{policy_task.id}/"
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertFalse(AutomatedTask.objects.filter(pk=policy_task.id)) # type: ignore
delete_policy_autotasks_task.assert_called_with(task=policy_task.id) # type: ignore
remove_orphaned_win_tasks.assert_called_once()
delete_win_task_schedule.assert_not_called()
self.check_not_authenticated("delete", url)
@@ -414,7 +367,7 @@ class TestAutotaskViews(TacticalTestCase):
self.assertEqual(resp.status_code, 404)
# test run agent task
url = f"{base_url}/{task.id}/run/" # type: ignore
url = f"{base_url}/{task.id}/run/"
resp = self.client.post(url, format="json")
self.assertEqual(resp.status_code, 200)
run_win_task.assert_called()
@@ -429,11 +382,11 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
@patch("agents.models.Agent.nats_cmd")
def test_remove_orphaned_win_task(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
agent = baker.make_recipe("agents.online_agent")
baker.make_recipe("agents.offline_agent")
task1 = AutomatedTask.objects.create(
agent=agent,
name="test task 1",
win_task_name=AutomatedTask.generate_task_name(),
)
# test removing an orphaned task
@@ -443,13 +396,13 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
"GoogleUpdateTaskMachineCore",
"GoogleUpdateTaskMachineUA",
"OneDrive Standalone Update Task-S-1-5-21-717461175-241712648-1206041384-1001",
self.task1.win_task_name,
task1.win_task_name,
"TacticalRMM_fixmesh",
"TacticalRMM_SchedReboot_jk324kajd",
"TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb", # orphaned task
]
self.calls = [
calls = [
call({"func": "listschedtasks"}, timeout=10),
call(
{
@@ -463,26 +416,23 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
]
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
remove_orphaned_win_tasks()
self.assertEqual(nats_cmd.call_count, 2)
nats_cmd.assert_has_calls(self.calls)
self.assertEqual(ret.status, "SUCCESS")
nats_cmd.assert_has_calls(calls)
# test nats delete task fail
nats_cmd.reset_mock()
nats_cmd.side_effect = [win_tasks, "error deleting task"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
nats_cmd.assert_has_calls(self.calls)
remove_orphaned_win_tasks()
nats_cmd.assert_has_calls(calls)
self.assertEqual(nats_cmd.call_count, 2)
self.assertEqual(ret.status, "SUCCESS")
# no orphaned tasks
nats_cmd.reset_mock()
win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb")
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
remove_orphaned_win_tasks()
self.assertEqual(nats_cmd.call_count, 1)
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.nats_cmd")
def test_run_win_task(self, nats_cmd):
@@ -490,159 +440,324 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 1",
win_task_name=AutomatedTask.generate_task_name(),
)
nats_cmd.return_value = "ok"
ret = run_win_task.s(self.task1.pk).apply()
self.assertEqual(ret.status, "SUCCESS")
# @patch("agents.models.Agent.nats_cmd")
# def test_create_win_task_schedule(self, nats_cmd):
# self.agent = baker.make_recipe("agents.agent")
@patch("agents.models.Agent.nats_cmd")
def test_create_win_task_schedule(self, nats_cmd):
agent = baker.make_recipe("agents.agent", time_zone="UTC")
# task_name = AutomatedTask.generate_task_name()
# # test scheduled task
# self.task1 = AutomatedTask.objects.create(
# agent=self.agent,
# name="test task 1",
# win_task_name=task_name,
# task_type="scheduled",
# run_time_bit_weekdays=127,
# run_time_minute="21:55",
# )
# self.assertEqual(self.task1.sync_status, "initial")
# nats_cmd.return_value = "ok"
# ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
# self.assertEqual(nats_cmd.call_count, 1)
# nats_cmd.assert_called_with(
# {
# "func": "schedtask",
# "schedtaskpayload": {
# "type": "rmm",
# "trigger": "weekly",
# "weekdays": 127,
# "pk": self.task1.pk,
# "name": task_name,
# "hour": 21,
# "min": 55,
# },
# },
# timeout=5,
# )
# self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
# self.assertEqual(self.task1.sync_status, "synced")
# test daily task
task1 = baker.make(
"autotasks.AutomatedTask",
agent=agent,
name="test task 1",
task_type="daily",
daily_interval=1,
run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30),
)
self.assertFalse(TaskResult.objects.filter(agent=agent, task=task1).exists())
# nats_cmd.return_value = "timeout"
# ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
# self.assertEqual(ret.status, "SUCCESS")
# self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
# self.assertEqual(self.task1.sync_status, "initial")
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"pk": task1.pk,
"type": "rmm",
"name": task1.win_task_name,
"overwrite_task": False,
"enabled": True,
"trigger": "daily",
"multiple_instances": 1,
"delete_expired_task_after": False,
"start_when_available": False,
"start_year": int(task1.run_time_date.strftime("%Y")),
"start_month": int(task1.run_time_date.strftime("%-m")),
"start_day": int(task1.run_time_date.strftime("%-d")),
"start_hour": int(task1.run_time_date.strftime("%-H")),
"start_min": int(task1.run_time_date.strftime("%-M")),
"day_interval": 1,
},
},
timeout=5,
)
nats_cmd.reset_mock()
self.assertEqual(
TaskResult.objects.get(task=task1, agent=agent).sync_status, "synced"
)
# # test runonce with future date
# nats_cmd.reset_mock()
# task_name = AutomatedTask.generate_task_name()
# run_time_date = djangotime.now() + djangotime.timedelta(hours=22)
# self.task2 = AutomatedTask.objects.create(
# agent=self.agent,
# name="test task 2",
# win_task_name=task_name,
# task_type="runonce",
# run_time_date=run_time_date,
# )
# nats_cmd.return_value = "ok"
# ret = create_win_task_schedule.s(pk=self.task2.pk).apply()
# nats_cmd.assert_called_with(
# {
# "func": "schedtask",
# "schedtaskpayload": {
# "type": "rmm",
# "trigger": "once",
# "pk": self.task2.pk,
# "name": task_name,
# "year": int(dt.datetime.strftime(self.task2.run_time_date, "%Y")),
# "month": dt.datetime.strftime(self.task2.run_time_date, "%B"),
# "day": int(dt.datetime.strftime(self.task2.run_time_date, "%d")),
# "hour": int(dt.datetime.strftime(self.task2.run_time_date, "%H")),
# "min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
# },
# },
# timeout=5,
# )
# self.assertEqual(ret.status, "SUCCESS")
nats_cmd.return_value = "timeout"
create_win_task_schedule(pk=task1.pk)
self.assertEqual(
TaskResult.objects.get(task=task1, agent=agent).sync_status, "initial"
)
nats_cmd.reset_mock()
# # test runonce with date in the past
# nats_cmd.reset_mock()
# task_name = AutomatedTask.generate_task_name()
# run_time_date = djangotime.now() - djangotime.timedelta(days=13)
# self.task3 = AutomatedTask.objects.create(
# agent=self.agent,
# name="test task 3",
# win_task_name=task_name,
# task_type="runonce",
# run_time_date=run_time_date,
# )
# nats_cmd.return_value = "ok"
# ret = create_win_task_schedule.s(pk=self.task3.pk).apply()
# self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
# self.assertEqual(ret.status, "SUCCESS")
# test weekly task
task1 = baker.make(
"autotasks.AutomatedTask",
agent=agent,
name="test task 1",
task_type="weekly",
weekly_interval=1,
run_asap_after_missed=True,
run_time_bit_weekdays=127,
run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30),
expire_date=djangotime.now() + djangotime.timedelta(days=100),
task_instance_policy=2,
)
# # test checkfailure
# nats_cmd.reset_mock()
# self.check = baker.make_recipe("checks.diskspace_check", agent=self.agent)
# task_name = AutomatedTask.generate_task_name()
# self.task4 = AutomatedTask.objects.create(
# agent=self.agent,
# name="test task 4",
# win_task_name=task_name,
# task_type="checkfailure",
# assigned_check=self.check,
# )
# nats_cmd.return_value = "ok"
# ret = create_win_task_schedule.s(pk=self.task4.pk).apply()
# nats_cmd.assert_called_with(
# {
# "func": "schedtask",
# "schedtaskpayload": {
# "type": "rmm",
# "trigger": "manual",
# "pk": self.task4.pk,
# "name": task_name,
# },
# },
# timeout=5,
# )
# self.assertEqual(ret.status, "SUCCESS")
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"pk": task1.pk,
"type": "rmm",
"name": task1.win_task_name,
"overwrite_task": False,
"enabled": True,
"trigger": "weekly",
"multiple_instances": 2,
"delete_expired_task_after": False,
"start_when_available": True,
"start_year": int(task1.run_time_date.strftime("%Y")),
"start_month": int(task1.run_time_date.strftime("%-m")),
"start_day": int(task1.run_time_date.strftime("%-d")),
"start_hour": int(task1.run_time_date.strftime("%-H")),
"start_min": int(task1.run_time_date.strftime("%-M")),
"expire_year": int(task1.expire_date.strftime("%Y")),
"expire_month": int(task1.expire_date.strftime("%-m")),
"expire_day": int(task1.expire_date.strftime("%-d")),
"expire_hour": int(task1.expire_date.strftime("%-H")),
"expire_min": int(task1.expire_date.strftime("%-M")),
"week_interval": 1,
"days_of_week": 127,
},
},
timeout=5,
)
nats_cmd.reset_mock()
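# Aside (illustrative sketch, not part of the diff): the weekly payload above is a direct field
# mapping: run_asap_after_missed -> start_when_available, task_instance_policy -> multiple_instances,
# expire_date -> the expire_* components, and run_time_bit_weekdays -> days_of_week, where the
# weekday set is a 7-bit mask (which bit maps to which day is not shown here). 127 simply means
# "every day of the week":
assert 127 == 0b1111111       # all seven weekday bits set
assert bin(56) == "0b111000"  # 56, used by the monthlydow test below, selects three adjacent days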
# # test manual
# nats_cmd.reset_mock()
# task_name = AutomatedTask.generate_task_name()
# self.task5 = AutomatedTask.objects.create(
# agent=self.agent,
# name="test task 5",
# win_task_name=task_name,
# task_type="manual",
# )
# nats_cmd.return_value = "ok"
# ret = create_win_task_schedule.s(pk=self.task5.pk).apply()
# nats_cmd.assert_called_with(
# {
# "func": "schedtask",
# "schedtaskpayload": {
# "type": "rmm",
# "trigger": "manual",
# "pk": self.task5.pk,
# "name": task_name,
# },
# },
# timeout=5,
# )
# self.assertEqual(ret.status, "SUCCESS")
# test monthly task
task1 = baker.make(
"autotasks.AutomatedTask",
agent=agent,
name="test task 1",
task_type="monthly",
random_task_delay="3M",
task_repetition_interval="15M",
task_repetition_duration="1D",
stop_task_at_duration_end=True,
monthly_days_of_month=0x80000030,
monthly_months_of_year=0x400,
run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30),
)
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"pk": task1.pk,
"type": "rmm",
"name": task1.win_task_name,
"overwrite_task": False,
"enabled": True,
"trigger": "monthly",
"multiple_instances": 1,
"delete_expired_task_after": False,
"start_when_available": False,
"start_year": int(task1.run_time_date.strftime("%Y")),
"start_month": int(task1.run_time_date.strftime("%-m")),
"start_day": int(task1.run_time_date.strftime("%-d")),
"start_hour": int(task1.run_time_date.strftime("%-H")),
"start_min": int(task1.run_time_date.strftime("%-M")),
"random_delay": "PT3M",
"repetition_interval": "PT15M",
"repetition_duration": "P1DT",
"stop_at_duration_end": True,
"days_of_month": 0x30,
"run_on_last_day_of_month": True,
"months_of_year": 1024,
},
},
timeout=5,
)
nats_cmd.reset_mock()
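# Aside (illustrative sketch, not part of the diff): the monthly payload implies two conversions
# inside create_win_task_schedule (the helper below is my own, not the project's):
#   * 0x80000030 is split into days_of_month=0x30 plus run_on_last_day_of_month=True, i.e. the
#     high bit 0x80000000 appears to flag "run on the last day of the month";
#   * the "3M"/"15M"/"1D" strings are rewritten as ISO 8601-style durations ("PT3M", "PT15M", "P1DT").
LAST_DAY_BIT = 0x80000000

def split_days_of_month(mask: int) -> tuple[int, bool]:
    return mask & ~LAST_DAY_BIT, bool(mask & LAST_DAY_BIT)

assert split_days_of_month(0x80000030) == (0x30, True)
assert 0x400 == 1024  # months_of_year: a single month bit (bit 10) is set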
# test monthly dow
task1 = baker.make(
"autotasks.AutomatedTask",
agent=agent,
name="test task 1",
task_type="monthlydow",
run_time_bit_weekdays=56,
monthly_months_of_year=0x400,
monthly_weeks_of_month=3,
run_time_date=djangotime.now() + djangotime.timedelta(hours=3, minutes=30),
)
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"pk": task1.pk,
"type": "rmm",
"name": task1.win_task_name,
"overwrite_task": False,
"enabled": True,
"trigger": "monthlydow",
"multiple_instances": 1,
"delete_expired_task_after": False,
"start_when_available": False,
"start_year": int(task1.run_time_date.strftime("%Y")),
"start_month": int(task1.run_time_date.strftime("%-m")),
"start_day": int(task1.run_time_date.strftime("%-d")),
"start_hour": int(task1.run_time_date.strftime("%-H")),
"start_min": int(task1.run_time_date.strftime("%-M")),
"days_of_week": 56,
"months_of_year": 0x400,
"weeks_of_month": 3,
},
},
timeout=5,
)
nats_cmd.reset_mock()
# test runonce with future date
task1 = baker.make(
"autotasks.AutomatedTask",
agent=agent,
name="test task 2",
task_type="runonce",
run_time_date=djangotime.now() + djangotime.timedelta(hours=22),
run_asap_after_missed=True,
)
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"pk": task1.pk,
"type": "rmm",
"name": task1.win_task_name,
"overwrite_task": False,
"enabled": True,
"trigger": "runonce",
"multiple_instances": 1,
"delete_expired_task_after": False,
"start_when_available": True,
"start_year": int(task1.run_time_date.strftime("%Y")),
"start_month": int(task1.run_time_date.strftime("%-m")),
"start_day": int(task1.run_time_date.strftime("%-d")),
"start_hour": int(task1.run_time_date.strftime("%-H")),
"start_min": int(task1.run_time_date.strftime("%-M")),
},
},
timeout=5,
)
nats_cmd.reset_mock()
# test runonce with date in the past
task1 = baker.make(
"autotasks.AutomatedTask",
agent=agent,
name="test task 3",
task_type="runonce",
run_asap_after_missed=True,
run_time_date=djangotime.datetime(2018, 6, 1, 23, 23, 23),
)
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called()
# check that the task is scheduled at most 5 minutes in the future
_, args, _ = nats_cmd.mock_calls[0]
current_minute = int(djangotime.now().strftime("%-M"))
if current_minute >= 55 and current_minute < 60:
self.assertLess(
args[0]["schedtaskpayload"]["start_min"],
int(djangotime.now().strftime("%-M")),
)
else:
self.assertGreater(
args[0]["schedtaskpayload"]["start_min"],
int(djangotime.now().strftime("%-M")),
)
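# Aside (illustrative sketch, not part of the diff): a runonce task whose run date is already in the
# past (with run_asap_after_missed set) is apparently rescheduled a few minutes from "now"; the
# branches above only account for the minute wrapping past the top of the hour. A tiny model of
# that wraparound (the 5-minute delay is an assumption; the real value lives in the task code):
def reschedule_minute(current_minute: int, delay: int = 5) -> int:
    return (current_minute + delay) % 60

assert reschedule_minute(57) < 57  # wrapped past the hour -> assertLess branch
assert reschedule_minute(20) > 20  # no wrap -> assertGreater branch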
# test checkfailure task
nats_cmd.reset_mock()
check = baker.make_recipe("checks.diskspace_check", agent=agent)
task1 = baker.make(
"autotasks.AutomatedTask",
agent=agent,
name="test task 4",
task_type="checkfailure",
assigned_check=check,
)
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"pk": task1.pk,
"type": "rmm",
"name": task1.win_task_name,
"overwrite_task": False,
"enabled": True,
"trigger": "manual",
"multiple_instances": 1,
"delete_expired_task_after": False,
"start_when_available": False,
},
},
timeout=5,
)
nats_cmd.reset_mock()
# test manual
task1 = AutomatedTask.objects.create(
agent=agent,
name="test task 5",
task_type="manual",
)
nats_cmd.return_value = "ok"
create_win_task_schedule(pk=task1.pk)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"pk": task1.pk,
"type": "rmm",
"name": task1.win_task_name,
"overwrite_task": False,
"enabled": True,
"trigger": "manual",
"multiple_instances": 1,
"delete_expired_task_after": False,
"start_when_available": False,
},
},
timeout=5,
)
class TestTaskPermissions(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
self.client_setup()
self.setup_client()
def test_get_tasks_permissions(self):
agent = baker.make_recipe("agents.agent")
@@ -709,7 +824,7 @@ class TestTaskPermissions(TacticalTestCase):
script = baker.make("scripts.Script")
policy_data = {
"policy": policy.id, # type: ignore
"policy": policy.id,
"name": "Test Task Manual",
"run_time_days": [],
"timeout": 120,
@@ -855,9 +970,3 @@ class TestTaskPermissions(TacticalTestCase):
self.check_authorized("post", url)
self.check_not_authorized("post", unauthorized_url)
def test_policy_fields_to_copy_exists(self):
fields = [i.name for i in AutomatedTask._meta.get_fields()]
task = baker.make("autotasks.AutomatedTask")
for i in task.policy_fields_to_copy: # type: ignore
self.assertIn(i, fields)

View File

@@ -1,16 +1,17 @@
from agents.models import Agent
from automation.models import Policy
from django.shortcuts import get_object_or_404
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from agents.models import Agent
from automation.models import Policy
from tacticalrmm.permissions import _has_perm_on_agent
from .models import AutomatedTask
from .permissions import AutoTaskPerms, RunAutoTaskPerms
from .serializers import TaskSerializer
from .tasks import remove_orphaned_win_tasks
class GetAddAutoTasks(APIView):
@@ -20,16 +21,15 @@ class GetAddAutoTasks(APIView):
if agent_id:
agent = get_object_or_404(Agent, agent_id=agent_id)
tasks = AutomatedTask.objects.filter(agent=agent)
tasks = agent.get_tasks_with_policies()
elif policy:
policy = get_object_or_404(Policy, id=policy)
tasks = AutomatedTask.objects.filter(policy=policy)
else:
tasks = AutomatedTask.objects.filter_by_role(request.user)
tasks = AutomatedTask.objects.filter_by_role(request.user) # type: ignore
return Response(TaskSerializer(tasks, many=True).data)
def post(self, request):
from automation.tasks import generate_agent_autotasks_task
from autotasks.tasks import create_win_task_schedule
data = request.data.copy()
@@ -45,16 +45,11 @@ class GetAddAutoTasks(APIView):
serializer = TaskSerializer(data=data)
serializer.is_valid(raise_exception=True)
task = serializer.save(
win_task_name=AutomatedTask.generate_task_name(),
)
task = serializer.save()
if task.agent:
create_win_task_schedule.delay(pk=task.pk)
elif task.policy:
generate_agent_autotasks_task.delay(policy=task.policy.pk)
return Response(
"The task has been created. It will show up on the agent on next checkin"
)
@@ -86,7 +81,6 @@ class GetEditDeleteAutoTask(APIView):
return Response("The task was updated")
def delete(self, request, pk):
from automation.tasks import delete_policy_autotasks_task
from autotasks.tasks import delete_win_task_schedule
task = get_object_or_404(AutomatedTask, pk=pk)
@@ -96,9 +90,9 @@ class GetEditDeleteAutoTask(APIView):
if task.agent:
delete_win_task_schedule.delay(pk=task.pk)
elif task.policy:
delete_policy_autotasks_task.delay(task=task.pk)
else:
task.delete()
remove_orphaned_win_tasks.delay()
return Response(f"{task.name} will be deleted shortly")
@@ -114,5 +108,14 @@ class RunAutoTask(APIView):
if task.agent and not _has_perm_on_agent(request.user, task.agent.agent_id):
raise PermissionDenied()
run_win_task.delay(pk=pk)
return Response(f"{task.name} will now be run on {task.agent.hostname}")
# run policy task on agent
if "agent_id" in request.data.keys():
if not _has_perm_on_agent(request.user, request.data["agent_id"]):
raise PermissionDenied()
run_win_task.delay(pk=pk, agent_id=request.data["agent_id"])
# run normal task on agent
else:
run_win_task.delay(pk=pk)
return Response(f"{task.name} will now be run.")

View File

@@ -1,6 +1,7 @@
from django.contrib import admin
from .models import Check, CheckHistory
from .models import Check, CheckHistory, CheckResult
admin.site.register(Check)
admin.site.register(CheckHistory)
admin.site.register(CheckResult)

View File

@@ -1,23 +1,25 @@
from model_bakery.recipe import Recipe
from tacticalrmm.constants import CheckType, EvtLogTypes
check = Recipe("checks.Check")
diskspace_check = check.extend(
check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=10
check_type=CheckType.DISK_SPACE, disk="C:", warning_threshold=30, error_threshold=10
)
cpuload_check = check.extend(
check_type="cpuload", warning_threshold=30, error_threshold=75
check_type=CheckType.CPU_LOAD, warning_threshold=30, error_threshold=75
)
ping_check = check.extend(check_type="ping", ip="10.10.10.10")
memory_check = check.extend(
check_type="memory", warning_threshold=60, error_threshold=75
check_type=CheckType.MEMORY, warning_threshold=60, error_threshold=75
)
winsvc_check = check.extend(
check_type="winsvc",
check_type=CheckType.WINSVC,
svc_name="ServiceName",
svc_display_name="ServiceName",
svc_policy_mode="manual",
@@ -25,9 +27,9 @@ winsvc_check = check.extend(
)
eventlog_check = check.extend(
check_type="eventlog", event_id=5000, event_type="application"
check_type=CheckType.EVENT_LOG, event_id=5000, event_type=EvtLogTypes.INFO
)
script_check = check.extend(
name="Script Name", check_type="script", script__name="Script Name"
name="Script Name", check_type=CheckType.SCRIPT, script__name="Script Name"
)

View File

@@ -0,0 +1,26 @@
CHECK_DEFER = (
"created_by",
"created_time",
"modified_by",
"modified_time",
"timeout",
"svc_display_name",
"svc_policy_mode",
"log_name",
"event_id",
"event_id_is_wildcard",
"event_type",
"event_source",
"event_message",
"fail_when",
"search_last_days",
)
CHECK_RESULT_DEFER = (
"more_info",
"outage_history",
"extra_details",
"stdout",
"stderr",
"execution_time",
)
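# Aside (illustrative sketch, not part of the diff): deferral tuples like these are normally
# unpacked into Django's QuerySet.defer() so the wide text/JSON columns are only fetched when
# accessed. Whether the project uses exactly this pattern elsewhere in the diff is an assumption:
from checks.models import Check, CheckResult

checks = Check.objects.defer(*CHECK_DEFER).select_related("agent")
results = CheckResult.objects.defer(*CHECK_RESULT_DEFER).filter(agent__agent_id="some-agent-id")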

View File

@@ -0,0 +1,49 @@
# Generated by Django 3.2.12 on 2022-04-01 22:44
import django.contrib.postgres.fields
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0046_alter_agenthistory_command'),
('checks', '0025_auto_20210917_1954'),
]
operations = [
migrations.RenameField(
model_name='check',
old_name='overriden_by_policy',
new_name='overridden_by_policy',
),
migrations.AddField(
model_name='checkhistory',
name='agent_id',
field=models.CharField(blank=True, max_length=200, null=True),
),
migrations.CreateModel(
name='CheckResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.CharField(choices=[('passing', 'Passing'), ('failing', 'Failing'), ('pending', 'Pending')], default='pending', max_length=100)),
('alert_severity', models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15, null=True)),
('more_info', models.TextField(blank=True, null=True)),
('last_run', models.DateTimeField(blank=True, null=True)),
('fail_count', models.PositiveIntegerField(default=0)),
('outage_history', models.JSONField(blank=True, null=True)),
('extra_details', models.JSONField(blank=True, null=True)),
('stdout', models.TextField(blank=True, null=True)),
('stderr', models.TextField(blank=True, null=True)),
('retcode', models.IntegerField(blank=True, null=True)),
('execution_time', models.CharField(blank=True, max_length=100, null=True)),
('history', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(blank=True), blank=True, default=list, null=True, size=None)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='checkresults', to='agents.agent')),
('assigned_check', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='checkresults', to='checks.check')),
],
options={
'unique_together': {('agent', 'assigned_check')},
},
),
]

View File

@@ -0,0 +1,80 @@
# Generated by Django 3.2.12 on 2022-04-01 22:48
from django.db import migrations, transaction
from django.db.utils import IntegrityError
from tacticalrmm.constants import CheckType
def migrate_check_results(apps, schema_editor):
Check = apps.get_model("checks", "Check")
CheckResult = apps.get_model("checks", "CheckResult")
for check in Check.objects.exclude(agent=None).iterator():
try:
with transaction.atomic():
if check.managed_by_policy:
CheckResult.objects.create(
assigned_check_id=check.parent_check,
agent_id=check.agent.id,
status=check.status,
more_info=check.more_info,
last_run=check.last_run,
fail_count=check.fail_count,
outage_history=check.outage_history,
extra_details=check.extra_details,
stdout=check.stdout,
stderr=check.stderr,
retcode=check.retcode,
execution_time=check.execution_time,
history=check.history,
alert_severity=check.alert_severity
if check.check_type
in [
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]
else None,
)
else:
CheckResult.objects.create(
assigned_check_id=check.id,
agent_id=check.agent.id,
status=check.status,
more_info=check.more_info,
last_run=check.last_run,
fail_count=check.fail_count,
outage_history=check.outage_history,
extra_details=check.extra_details,
stdout=check.stdout,
stderr=check.stderr,
retcode=check.retcode,
execution_time=check.execution_time,
history=check.history,
alert_severity=check.alert_severity
if check.check_type
in [
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]
else None,
)
except IntegrityError:
pass
class Migration(migrations.Migration):
atomic = False
dependencies = [
("checks", "0026_auto_20220401_2244"),
]
operations = [
migrations.RunPython(migrate_check_results),
]
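# Aside (illustrative, not part of the diff): the combination above (Migration.atomic = False, a
# per-row transaction.atomic() block, and a swallowed IntegrityError) is the usual Django recipe
# for a best-effort data migration: a duplicate (agent, assigned_check) pair rolls back only its
# own row instead of aborting the whole migration. The bare shape of the pattern:
from django.db import transaction
from django.db.utils import IntegrityError

def copy_rows(rows, create_one):
    for row in rows:
        try:
            with transaction.atomic():  # savepoint per row
                create_one(row)
        except IntegrityError:
            continue  # skip the duplicate, keep migrating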

View File

@@ -0,0 +1,61 @@
# Generated by Django 3.2.12 on 2022-04-01 23:01
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('checks', '0027_auto_20220401_2248'),
]
operations = [
migrations.RemoveField(
model_name='check',
name='execution_time',
),
migrations.RemoveField(
model_name='check',
name='extra_details',
),
migrations.RemoveField(
model_name='check',
name='fail_count',
),
migrations.RemoveField(
model_name='check',
name='history',
),
migrations.RemoveField(
model_name='check',
name='last_run',
),
migrations.RemoveField(
model_name='check',
name='more_info',
),
migrations.RemoveField(
model_name='check',
name='outage_history',
),
migrations.RemoveField(
model_name='check',
name='parent_check',
),
migrations.RemoveField(
model_name='check',
name='retcode',
),
migrations.RemoveField(
model_name='check',
name='status',
),
migrations.RemoveField(
model_name='check',
name='stderr',
),
migrations.RemoveField(
model_name='check',
name='stdout',
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 4.0.3 on 2022-04-15 21:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('checks', '0028_auto_20220401_2301'),
]
operations = [
migrations.AlterField(
model_name='checkresult',
name='alert_severity',
field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=15, null=True),
),
]

View File

@@ -1,50 +1,30 @@
from statistics import mean
from typing import Any
from typing import TYPE_CHECKING, Any, Dict, Optional, Union
import pytz
from alerts.models import SEVERITY_CHOICES
from core.models import CoreSettings
from django.contrib.postgres.fields import ArrayField
from django.core.cache import cache
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from logs.models import BaseAuditModel
from django.utils import timezone as djangotime
from alerts.models import SEVERITY_CHOICES
from core.utils import get_core_settings
from logs.models import BaseAuditModel
from tacticalrmm.constants import (
CHECKS_NON_EDITABLE_FIELDS,
POLICY_CHECK_FIELDS_TO_COPY,
CheckStatus,
CheckType,
EvtLogFailWhen,
EvtLogNames,
EvtLogTypes,
)
from tacticalrmm.models import PermissionQuerySet
CHECK_TYPE_CHOICES = [
("diskspace", "Disk Space Check"),
("ping", "Ping Check"),
("cpuload", "CPU Load Check"),
("memory", "Memory Check"),
("winsvc", "Service Check"),
("script", "Script Check"),
("eventlog", "Event Log Check"),
]
CHECK_STATUS_CHOICES = [
("passing", "Passing"),
("failing", "Failing"),
("pending", "Pending"),
]
EVT_LOG_NAME_CHOICES = [
("Application", "Application"),
("System", "System"),
("Security", "Security"),
]
EVT_LOG_TYPE_CHOICES = [
("INFO", "Information"),
("WARNING", "Warning"),
("ERROR", "Error"),
("AUDIT_SUCCESS", "Success Audit"),
("AUDIT_FAILURE", "Failure Audit"),
]
EVT_LOG_FAIL_WHEN_CHOICES = [
("contains", "Log contains"),
("not_contains", "Log does not contain"),
]
if TYPE_CHECKING:
from agents.models import Agent # pragma: no cover
from alerts.models import Alert, AlertTemplate # pragma: no cover
from automation.models import Policy # pragma: no cover
class Check(BaseAuditModel):
@@ -66,25 +46,15 @@ class Check(BaseAuditModel):
blank=True,
on_delete=models.CASCADE,
)
managed_by_policy = models.BooleanField(default=False)
overriden_by_policy = models.BooleanField(default=False)
parent_check = models.PositiveIntegerField(null=True, blank=True)
overridden_by_policy = models.BooleanField(default=False)
name = models.CharField(max_length=255, null=True, blank=True)
check_type = models.CharField(
max_length=50, choices=CHECK_TYPE_CHOICES, default="diskspace"
max_length=50, choices=CheckType.choices, default=CheckType.DISK_SPACE
)
status = models.CharField(
max_length=100, choices=CHECK_STATUS_CHOICES, default="pending"
)
more_info = models.TextField(null=True, blank=True)
last_run = models.DateTimeField(null=True, blank=True)
email_alert = models.BooleanField(default=False)
text_alert = models.BooleanField(default=False)
dashboard_alert = models.BooleanField(default=False)
fails_b4_alert = models.PositiveIntegerField(default=1)
fail_count = models.PositiveIntegerField(default=0)
outage_history = models.JSONField(null=True, blank=True) # store
extra_details = models.JSONField(null=True, blank=True)
run_interval = models.PositiveIntegerField(blank=True, default=0)
# check specific fields
@@ -141,14 +111,6 @@ class Check(BaseAuditModel):
default=list,
)
timeout = models.PositiveIntegerField(null=True, blank=True)
stdout = models.TextField(null=True, blank=True)
stderr = models.TextField(null=True, blank=True)
retcode = models.IntegerField(null=True, blank=True)
execution_time = models.CharField(max_length=100, null=True, blank=True)
# cpu and mem check history
history = ArrayField(
models.IntegerField(blank=True), null=True, blank=True, default=list
)
# win service checks
svc_name = models.CharField(max_length=255, null=True, blank=True)
svc_display_name = models.CharField(max_length=255, null=True, blank=True)
@@ -161,32 +123,71 @@ class Check(BaseAuditModel):
# event log checks
log_name = models.CharField(
max_length=255, choices=EVT_LOG_NAME_CHOICES, null=True, blank=True
max_length=255, choices=EvtLogNames.choices, null=True, blank=True
)
event_id = models.IntegerField(null=True, blank=True)
event_id_is_wildcard = models.BooleanField(default=False)
event_type = models.CharField(
max_length=255, choices=EVT_LOG_TYPE_CHOICES, null=True, blank=True
max_length=255, choices=EvtLogTypes.choices, null=True, blank=True
)
event_source = models.CharField(max_length=255, null=True, blank=True)
event_message = models.TextField(null=True, blank=True)
fail_when = models.CharField(
max_length=255, choices=EVT_LOG_FAIL_WHEN_CHOICES, null=True, blank=True
max_length=255, choices=EvtLogFailWhen.choices, null=True, blank=True
)
search_last_days = models.PositiveIntegerField(null=True, blank=True)
number_of_events_b4_alert = models.PositiveIntegerField(
null=True, blank=True, default=1
)
# deprecated
managed_by_policy = models.BooleanField(default=False)
# non-database property
check_result: "Union[CheckResult, Dict[None, None]]" = {}
def __str__(self):
if self.agent:
return f"{self.agent.hostname} - {self.readable_desc}"
else:
return f"{self.policy.name} - {self.readable_desc}"
def save(self, *args, **kwargs):
# if this is a policy check, clear the cached check lists for every site and agent
if self.policy:
cache.delete_many_pattern("site_*_checks")
cache.delete_many_pattern("agent_*_checks")
# if check is an agent check
elif self.agent:
cache.delete(f"agent_{self.agent.agent_id}_checks")
super(Check, self).save(
*args,
**kwargs,
)
def delete(self, *args, **kwargs):
# if this is a policy check, clear the cached check lists for every site and agent
if self.policy:
cache.delete_many_pattern("site_*_checks")
cache.delete_many_pattern("agent_*_checks")
# if check is an agent check
elif self.agent:
cache.delete(f"agent_{self.agent.agent_id}_checks")
super(Check, self).delete(
*args,
**kwargs,
)
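# Aside (illustrative sketch, not part of the diff): delete_many_pattern() is not part of Django's
# stock cache API, so it is presumably a helper on the project's cache backend. The invalidation
# above pairs with a read path roughly like this (the key format comes from the strings above; the
# helper name and the "agentchecks" related name are assumptions):
from django.core.cache import cache

def get_agent_checks(agent):
    key = f"agent_{agent.agent_id}_checks"
    checks = cache.get(key)
    if checks is None:
        checks = list(agent.agentchecks.all())
        cache.set(key, checks, timeout=600)
    return checks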
@property
def readable_desc(self):
if self.check_type == "diskspace":
display = self.get_check_type_display() # type: ignore
if self.check_type == CheckType.DISK_SPACE:
text = ""
if self.warning_threshold:
@@ -194,10 +195,12 @@ class Check(BaseAuditModel):
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"
return f"{self.get_check_type_display()}: Drive {self.disk} - {text}" # type: ignore
elif self.check_type == "ping":
return f"{self.get_check_type_display()}: {self.name}" # type: ignore
elif self.check_type == "cpuload" or self.check_type == "memory":
return f"{display}: Drive {self.disk} - {text}"
elif self.check_type == CheckType.PING:
return f"{display}: {self.name}"
elif (
self.check_type == CheckType.CPU_LOAD or self.check_type == CheckType.MEMORY
):
text = ""
if self.warning_threshold:
@@ -205,91 +208,35 @@ class Check(BaseAuditModel):
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"
return f"{self.get_check_type_display()} - {text}" # type: ignore
elif self.check_type == "winsvc":
return f"{self.get_check_type_display()}: {self.svc_display_name}" # type: ignore
elif self.check_type == "eventlog":
return f"{self.get_check_type_display()}: {self.name}" # type: ignore
elif self.check_type == "script":
return f"{self.get_check_type_display()}: {self.script.name}" # type: ignore
return f"{display} - {text}"
elif self.check_type == CheckType.WINSVC:
return f"{display}: {self.svc_display_name}"
elif self.check_type == CheckType.EVENT_LOG:
return f"{display}: {self.name}"
elif self.check_type == CheckType.SCRIPT:
return f"{display}: {self.script.name}"
else:
return "n/a"
@property
def history_info(self):
if self.check_type == "cpuload" or self.check_type == "memory":
return ", ".join(str(f"{x}%") for x in self.history[-6:])
@property
def last_run_as_timezone(self):
if self.last_run is not None and self.agent is not None:
return self.last_run.astimezone(
pytz.timezone(self.agent.timezone)
).strftime("%b-%d-%Y - %H:%M")
return self.last_run
@staticmethod
def non_editable_fields() -> list[str]:
return [
"check_type",
"more_info",
"last_run",
"fail_count",
"outage_history",
"extra_details",
"status",
"stdout",
"stderr",
"retcode",
"execution_time",
"history",
"readable_desc",
"history_info",
"parent_check",
"managed_by_policy",
"overriden_by_policy",
"created_by",
"created_time",
"modified_by",
"modified_time",
]
return CHECKS_NON_EDITABLE_FIELDS
@property
def policy_fields_to_copy(self) -> list[str]:
return [
"warning_threshold",
"error_threshold",
"alert_severity",
"name",
"run_interval",
"disk",
"fails_b4_alert",
"ip",
"script",
"script_args",
"info_return_codes",
"warning_return_codes",
"timeout",
"svc_name",
"svc_display_name",
"svc_policy_mode",
"pass_if_start_pending",
"pass_if_svc_not_exist",
"restart_if_stopped",
"log_name",
"event_id",
"event_id_is_wildcard",
"event_type",
"event_source",
"event_message",
"fail_when",
"search_last_days",
"number_of_events_b4_alert",
"email_alert",
"text_alert",
"dashboard_alert",
]
def create_policy_check(self, policy: "Policy") -> None:
fields_to_copy = POLICY_CHECK_FIELDS_TO_COPY
check = Check.objects.create(
policy=policy,
)
for task in self.assignedtasks.all(): # type: ignore
task.create_policy_task(policy=policy, assigned_check=check)
for field in fields_to_copy:
setattr(check, field, getattr(self, field))
check.save()
def should_create_alert(self, alert_template=None):
@@ -307,85 +254,197 @@ class Check(BaseAuditModel):
)
)
def add_check_history(self, value: int, more_info: Any = None) -> None:
CheckHistory.objects.create(check_id=self.pk, y=value, results=more_info)
def add_check_history(
self, value: int, agent_id: str, more_info: Any = None
) -> None:
CheckHistory.objects.create(
check_id=self.pk, y=value, results=more_info, agent_id=agent_id
)
def handle_check(self, data):
@staticmethod
def serialize(check):
# serializes the check and returns json
from .serializers import CheckAuditSerializer
return CheckAuditSerializer(check).data
def is_duplicate(self, check):
if self.check_type == CheckType.DISK_SPACE:
return self.disk == check.disk
elif self.check_type == CheckType.SCRIPT:
return self.script == check.script
elif self.check_type == CheckType.PING:
return self.ip == check.ip
elif self.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
return True
elif self.check_type == CheckType.WINSVC:
return self.svc_name == check.svc_name
elif self.check_type == CheckType.EVENT_LOG:
return [self.log_name, self.event_id] == [check.log_name, check.event_id]
class CheckResult(models.Model):
objects = PermissionQuerySet.as_manager()
class Meta:
unique_together = (("agent", "assigned_check"),)
agent = models.ForeignKey(
"agents.Agent",
related_name="checkresults",
on_delete=models.CASCADE,
)
assigned_check = models.ForeignKey(
"checks.Check",
related_name="checkresults",
on_delete=models.CASCADE,
)
status = models.CharField(
max_length=100, choices=CheckStatus.choices, default=CheckStatus.PENDING
)
# for memory, diskspace, script, and cpu checks where severity changes
alert_severity = models.CharField(
max_length=15,
choices=SEVERITY_CHOICES,
null=True,
blank=True,
)
more_info = models.TextField(null=True, blank=True)
last_run = models.DateTimeField(null=True, blank=True)
fail_count = models.PositiveIntegerField(default=0)
outage_history = models.JSONField(null=True, blank=True) # store
extra_details = models.JSONField(null=True, blank=True)
stdout = models.TextField(null=True, blank=True)
stderr = models.TextField(null=True, blank=True)
retcode = models.IntegerField(null=True, blank=True)
execution_time = models.CharField(max_length=100, null=True, blank=True)
# cpu and mem check history
history = ArrayField(
models.IntegerField(blank=True), null=True, blank=True, default=list
)
def __str__(self):
return f"{self.agent.hostname} - {self.assigned_check}"
def save(self, *args, **kwargs):
# default the alert severity for check types where the severity can change per result
if not self.alert_severity and self.assigned_check.check_type in [
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]:
self.alert_severity = "warning"
super(CheckResult, self).save(
*args,
**kwargs,
)
@property
def history_info(self):
if self.assigned_check.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
return ", ".join(str(f"{x}%") for x in self.history[-6:])
def get_or_create_alert_if_needed(
self, alert_template: "Optional[AlertTemplate]"
) -> "Optional[Alert]":
from alerts.models import Alert
return Alert.create_or_return_check_alert(
self.assigned_check,
agent=self.agent,
alert_severity=self.alert_severity,
skip_create=not self.assigned_check.should_create_alert(alert_template),
)
def handle_check(self, data, check: "Check", agent: "Agent"):
from alerts.models import Alert
update_fields = []
# cpuload or mem checks
if self.check_type == "cpuload" or self.check_type == "memory":
if check.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
self.history.append(data["percent"])
if len(self.history) > 15:
self.history = self.history[-15:]
self.save(update_fields=["history"])
update_fields.extend(["history"])
avg = int(mean(self.history))
if self.error_threshold and avg > self.error_threshold:
self.status = "failing"
if check.error_threshold and avg > check.error_threshold:
self.status = CheckStatus.FAILING
self.alert_severity = "error"
elif self.warning_threshold and avg > self.warning_threshold:
self.status = "failing"
elif check.warning_threshold and avg > check.warning_threshold:
self.status = CheckStatus.FAILING
self.alert_severity = "warning"
else:
self.status = "passing"
self.status = CheckStatus.PASSING
# add check history
self.add_check_history(data["percent"])
check.add_check_history(data["percent"], agent.agent_id)
# diskspace checks
elif self.check_type == "diskspace":
elif check.check_type == CheckType.DISK_SPACE:
if data["exists"]:
percent_used = round(data["percent_used"])
if self.error_threshold and (100 - percent_used) < self.error_threshold:
self.status = "failing"
if (
check.error_threshold
and (100 - percent_used) < check.error_threshold
):
self.status = CheckStatus.FAILING
self.alert_severity = "error"
elif (
self.warning_threshold
and (100 - percent_used) < self.warning_threshold
check.warning_threshold
and (100 - percent_used) < check.warning_threshold
):
self.status = "failing"
self.status = CheckStatus.FAILING
self.alert_severity = "warning"
else:
self.status = "passing"
self.status = CheckStatus.PASSING
self.more_info = data["more_info"]
# add check history
self.add_check_history(100 - percent_used)
check.add_check_history(100 - percent_used, agent.agent_id)
else:
self.status = "failing"
self.status = CheckStatus.FAILING
self.alert_severity = "error"
self.more_info = f"Disk {self.disk} does not exist"
self.more_info = f"Disk {check.disk} does not exist"
self.save(update_fields=["more_info"])
update_fields.extend(["more_info"])
# script checks
elif self.check_type == "script":
elif check.check_type == CheckType.SCRIPT:
self.stdout = data["stdout"]
self.stderr = data["stderr"]
self.retcode = data["retcode"]
self.execution_time = "{:.4f}".format(data["runtime"])
if data["retcode"] in self.info_return_codes:
if data["retcode"] in check.info_return_codes:
self.alert_severity = "info"
self.status = "failing"
elif data["retcode"] in self.warning_return_codes:
self.status = CheckStatus.FAILING
elif data["retcode"] in check.warning_return_codes:
self.alert_severity = "warning"
self.status = "failing"
self.status = CheckStatus.FAILING
elif data["retcode"] != 0:
self.status = "failing"
self.status = CheckStatus.FAILING
self.alert_severity = "error"
else:
self.status = "passing"
self.status = CheckStatus.PASSING
self.save(
update_fields=[
update_fields.extend(
[
"stdout",
"stderr",
"retcode",
@@ -394,8 +453,9 @@ class Check(BaseAuditModel):
)
# add check history
self.add_check_history(
1 if self.status == "failing" else 0,
check.add_check_history(
1 if self.status == CheckStatus.FAILING else 0,
agent.agent_id,
{
"retcode": data["retcode"],
"stdout": data["stdout"][:60],
@@ -405,127 +465,79 @@ class Check(BaseAuditModel):
)
# ping checks
elif self.check_type == "ping":
elif check.check_type == CheckType.PING:
self.status = data["status"]
self.more_info = data["output"]
self.save(update_fields=["more_info"])
update_fields.extend(["more_info"])
self.add_check_history(
1 if self.status == "failing" else 0, self.more_info[:60]
check.add_check_history(
1 if self.status == CheckStatus.FAILING else 0,
agent.agent_id,
self.more_info[:60],
)
# windows service checks
elif self.check_type == "winsvc":
elif check.check_type == CheckType.WINSVC:
self.status = data["status"]
self.more_info = data["more_info"]
self.save(update_fields=["more_info"])
update_fields.extend(["more_info"])
self.add_check_history(
1 if self.status == "failing" else 0, self.more_info[:60]
check.add_check_history(
1 if self.status == CheckStatus.FAILING else 0,
agent.agent_id,
self.more_info[:60],
)
elif self.check_type == "eventlog":
elif check.check_type == CheckType.EVENT_LOG:
log = data["log"]
if self.fail_when == "contains":
if log and len(log) >= self.number_of_events_b4_alert:
self.status = "failing"
if check.fail_when == EvtLogFailWhen.CONTAINS:
if log and len(log) >= check.number_of_events_b4_alert:
self.status = CheckStatus.FAILING
else:
self.status = "passing"
self.status = CheckStatus.PASSING
elif self.fail_when == "not_contains":
if log and len(log) >= self.number_of_events_b4_alert:
self.status = "passing"
elif check.fail_when == EvtLogFailWhen.NOT_CONTAINS:
if log and len(log) >= check.number_of_events_b4_alert:
self.status = CheckStatus.PASSING
else:
self.status = "failing"
self.status = CheckStatus.FAILING
self.extra_details = {"log": log}
self.save(update_fields=["extra_details"])
update_fields.extend(["extra_details"])
self.add_check_history(
1 if self.status == "failing" else 0,
check.add_check_history(
1 if self.status == CheckStatus.FAILING else 0,
agent.agent_id,
"Events Found:" + str(len(self.extra_details["log"])),
)
self.last_run = djangotime.now()
# handle status
if self.status == "failing":
if self.status == CheckStatus.FAILING:
self.fail_count += 1
self.save(update_fields=["status", "fail_count", "alert_severity"])
update_fields.extend(["status", "fail_count", "alert_severity", "last_run"])
self.save(update_fields=update_fields)
if self.fail_count >= self.fails_b4_alert:
if self.fail_count >= check.fails_b4_alert:
Alert.handle_alert_failure(self)
elif self.status == "passing":
elif self.status == CheckStatus.PASSING:
self.fail_count = 0
self.save()
if Alert.objects.filter(assigned_check=self, resolved=False).exists():
update_fields.extend(["status", "fail_count", "alert_severity", "last_run"])
self.save(update_fields=update_fields)
if Alert.objects.filter(
assigned_check=check, agent=agent, resolved=False
).exists():
Alert.handle_alert_resolve(self)
else:
update_fields.extend(["last_run"])
self.save(update_fields=update_fields)
return self.status
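# Aside (illustrative sketch, not part of the diff): handle_check() now lives on CheckResult and
# receives the agent-reported payload plus the parent Check and Agent, batching update_fields so
# the row is written once per run. A sketch of how the checkrunner endpoint presumably drives it
# for a cpu/memory check (the get_or_create lookup is an assumption):
result, _ = CheckResult.objects.get_or_create(agent=agent, assigned_check=check)
status = result.handle_check(data={"percent": 91}, check=check, agent=agent)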
def handle_assigned_task(self) -> None:
for task in self.assignedtask.all(): # type: ignore
if task.enabled:
task.run_win_task()
@staticmethod
def serialize(check):
# serializes the check and returns json
from .serializers import CheckAuditSerializer
return CheckAuditSerializer(check).data
def create_policy_check(self, agent=None, policy=None):
if (not agent and not policy) or (agent and policy):
return
check = Check.objects.create(
agent=agent,
policy=policy,
managed_by_policy=bool(agent),
parent_check=(self.pk if agent else None),
check_type=self.check_type,
script=self.script,
)
for task in self.assignedtask.all(): # type: ignore
if policy or (
agent and not agent.autotasks.filter(parent_task=task.pk).exists()
):
task.create_policy_task(
agent=agent, policy=policy, assigned_check=check
)
for field in self.policy_fields_to_copy:
setattr(check, field, getattr(self, field))
check.save()
def is_duplicate(self, check):
if self.check_type == "diskspace":
return self.disk == check.disk
elif self.check_type == "script":
return self.script == check.script
elif self.check_type == "ping":
return self.ip == check.ip
elif self.check_type == "cpuload":
return True
elif self.check_type == "memory":
return True
elif self.check_type == "winsvc":
return self.svc_name == check.svc_name
elif self.check_type == "eventlog":
return [self.log_name, self.event_id] == [check.log_name, check.event_id]
def send_email(self):
CORE = CoreSettings.objects.first()
CORE = get_core_settings()
body: str = ""
if self.agent:
@@ -533,66 +545,66 @@ class Check(BaseAuditModel):
else:
subject = f"{self} Failed"
if self.check_type == "diskspace":
if self.assigned_check.check_type == CheckType.DISK_SPACE:
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"
if self.assigned_check.warning_threshold:
text += f" Warning Threshold: {self.assigned_check.warning_threshold}%"
if self.assigned_check.error_threshold:
text += f" Error Threshold: {self.assigned_check.error_threshold}%"
try:
percent_used = [
d["percent"] for d in self.agent.disks if d["device"] == self.disk
d["percent"]
for d in self.agent.disks
if d["device"] == self.assigned_check.disk
][0]
percent_free = 100 - percent_used
body = subject + f" - Free: {percent_free}%, {text}"
except:
body = subject + f" - Disk {self.disk} does not exist"
body = subject + f" - Disk {self.assigned_check.disk} does not exist"
elif self.check_type == "script":
elif self.assigned_check.check_type == CheckType.SCRIPT:
body = (
subject
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
)
elif self.check_type == "ping":
elif self.assigned_check.check_type == CheckType.PING:
body = self.more_info
elif self.check_type == "cpuload" or self.check_type == "memory":
elif self.assigned_check.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"
if self.assigned_check.warning_threshold:
text += f" Warning Threshold: {self.assigned_check.warning_threshold}%"
if self.assigned_check.error_threshold:
text += f" Error Threshold: {self.assigned_check.error_threshold}%"
avg = int(mean(self.history))
if self.check_type == "cpuload":
if self.assigned_check.check_type == CheckType.CPU_LOAD:
body = subject + f" - Average CPU utilization: {avg}%, {text}"
elif self.check_type == "memory":
elif self.assigned_check.check_type == CheckType.MEMORY:
body = subject + f" - Average memory usage: {avg}%, {text}"
elif self.check_type == "winsvc":
elif self.assigned_check.check_type == CheckType.WINSVC:
body = subject + f" - Status: {self.more_info}"
elif self.check_type == "eventlog":
elif self.assigned_check.check_type == CheckType.EVENT_LOG:
if self.event_source and self.event_message:
start = f"Event ID {self.event_id}, source {self.event_source}, containing string {self.event_message} "
elif self.event_source:
start = f"Event ID {self.event_id}, source {self.event_source} "
elif self.event_message:
start = (
f"Event ID {self.event_id}, containing string {self.event_message} "
)
if self.assigned_check.event_source and self.assigned_check.event_message:
start = f"Event ID {self.assigned_check.event_id}, source {self.assigned_check.event_source}, containing string {self.assigned_check.event_message} "
elif self.assigned_check.event_source:
start = f"Event ID {self.assigned_check.event_id}, source {self.assigned_check.event_source} "
elif self.assigned_check.event_message:
start = f"Event ID {self.assigned_check.event_id}, containing string {self.assigned_check.event_message} "
else:
start = f"Event ID {self.event_id} "
start = f"Event ID {self.assigned_check.event_id} "
body = start + f"was found in the {self.log_name} log\n\n"
body = start + f"was found in the {self.assigned_check.log_name} log\n\n"
for i in self.extra_details["log"]:
try:
@@ -605,7 +617,7 @@ class Check(BaseAuditModel):
def send_sms(self):
CORE = CoreSettings.objects.first()
CORE = get_core_settings()
body: str = ""
if self.agent:
@@ -613,47 +625,49 @@ class Check(BaseAuditModel):
else:
subject = f"{self} Failed"
if self.check_type == "diskspace":
if self.assigned_check.check_type == CheckType.DISK_SPACE:
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"
if self.assigned_check.warning_threshold:
text += f" Warning Threshold: {self.assigned_check.warning_threshold}%"
if self.assigned_check.error_threshold:
text += f" Error Threshold: {self.assigned_check.error_threshold}%"
try:
percent_used = [
d["percent"] for d in self.agent.disks if d["device"] == self.disk
d["percent"]
for d in self.agent.disks
if d["device"] == self.assigned_check.disk
][0]
percent_free = 100 - percent_used
body = subject + f" - Free: {percent_free}%, {text}"
except:
body = subject + f" - Disk {self.disk} does not exist"
body = subject + f" - Disk {self.assigned_check.disk} does not exist"
elif self.check_type == "script":
elif self.assigned_check.check_type == CheckType.SCRIPT:
body = subject + f" - Return code: {self.retcode}"
elif self.check_type == "ping":
elif self.assigned_check.check_type == CheckType.PING:
body = subject
elif self.check_type == "cpuload" or self.check_type == "memory":
elif self.assigned_check.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"
if self.error_threshold:
text += f" Error Threshold: {self.error_threshold}%"
if self.assigned_check.warning_threshold:
text += f" Warning Threshold: {self.assigned_check.warning_threshold}%"
if self.assigned_check.error_threshold:
text += f" Error Threshold: {self.assigned_check.error_threshold}%"
avg = int(mean(self.history))
if self.check_type == "cpuload":
if self.assigned_check.check_type == CheckType.CPU_LOAD:
body = subject + f" - Average CPU utilization: {avg}%, {text}"
elif self.check_type == "memory":
elif self.assigned_check.check_type == CheckType.MEMORY:
body = subject + f" - Average memory usage: {avg}%, {text}"
elif self.check_type == "winsvc":
elif self.assigned_check.check_type == CheckType.WINSVC:
body = subject + f" - Status: {self.more_info}"
elif self.check_type == "eventlog":
elif self.assigned_check.check_type == CheckType.EVENT_LOG:
body = subject
CORE.send_sms(body, alert_template=self.agent.alert_template)
def send_resolved_email(self):
CORE = CoreSettings.objects.first()
CORE = get_core_settings()
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
body = f"{self} is now back to normal"
@@ -661,7 +675,7 @@ class Check(BaseAuditModel):
CORE.send_mail(subject, body, alert_template=self.agent.alert_template)
def send_resolved_sms(self):
CORE = CoreSettings.objects.first()
CORE = get_core_settings()
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
CORE.send_sms(subject, alert_template=self.agent.alert_template)
@@ -671,6 +685,7 @@ class CheckHistory(models.Model):
objects = PermissionQuerySet.as_manager()
check_id = models.PositiveIntegerField(default=0)
agent_id = models.CharField(max_length=200, null=True, blank=True)
x = models.DateTimeField(auto_now_add=True)
y = models.PositiveIntegerField(null=True, blank=True, default=None)
results = models.JSONField(null=True, blank=True)

View File

@@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
class ChecksPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET" or r.method == "PATCH":
if "agent_id" in view.kwargs.keys():
return _has_perm(r, "can_list_checks") and _has_perm_on_agent(
@@ -17,7 +17,7 @@ class ChecksPerms(permissions.BasePermission):
class RunChecksPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
return _has_perm(r, "can_run_checks") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)

View File

@@ -1,11 +1,12 @@
import pytz
import validators as _v
from autotasks.models import AutomatedTask
from rest_framework import serializers
from autotasks.models import AutomatedTask
from scripts.models import Script
from scripts.serializers import ScriptCheckSerializer
from tacticalrmm.constants import CheckType
from .models import Check, CheckHistory
from .models import Check, CheckHistory, CheckResult
class AssignedTaskField(serializers.ModelSerializer):
@@ -14,13 +15,25 @@ class AssignedTaskField(serializers.ModelSerializer):
fields = "__all__"
class CheckResultSerializer(serializers.ModelSerializer):
class Meta:
model = CheckResult
fields = "__all__"
class CheckSerializer(serializers.ModelSerializer):
readable_desc = serializers.ReadOnlyField()
assigned_task = serializers.SerializerMethodField()
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
history_info = serializers.ReadOnlyField()
assignedtasks = AssignedTaskField(many=True, read_only=True)
alert_template = serializers.SerializerMethodField()
check_result = serializers.SerializerMethodField()
def get_check_result(self, obj):
return (
CheckResultSerializer(obj.check_result).data
if isinstance(obj.check_result, CheckResult)
else {}
)
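# Aside (illustrative sketch, not part of the diff): check_result is the non-database attribute
# declared on Check earlier in this diff (default {}), so the serializer emits an empty dict unless
# a view attaches the per-agent result first, roughly:
check.check_result = (
    CheckResult.objects.filter(agent=agent, assigned_check=check).first() or {}
)  # assumed attach step; the real view code may differ
data = CheckSerializer(check).data  # data["check_result"] is now populated, or {} if none exists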
def get_alert_template(self, obj):
if obj.agent:
@@ -38,15 +51,6 @@ class CheckSerializer(serializers.ModelSerializer):
"always_alert": alert_template.check_always_alert,
}
## Change to return only array of tasks after 9/25/2020
def get_assigned_task(self, obj):
if obj.assignedtask.exists():
tasks = obj.assignedtask.all()
if len(tasks) == 1:
return AssignedTaskField(tasks[0]).data
else:
return AssignedTaskField(tasks, many=True).data
class Meta:
model = Check
fields = "__all__"
@@ -65,12 +69,10 @@ class CheckSerializer(serializers.ModelSerializer):
# disk checks
# make sure no duplicate disk checks exist for an agent/policy
if check_type == "diskspace":
if check_type == CheckType.DISK_SPACE:
if not self.instance: # only on create
checks = (
Check.objects.filter(**filter)
.filter(check_type="diskspace")
.exclude(managed_by_policy=True)
checks = Check.objects.filter(**filter).filter(
check_type=CheckType.DISK_SPACE
)
for check in checks:
if val["disk"] in check.disk:
@@ -93,7 +95,7 @@ class CheckSerializer(serializers.ModelSerializer):
)
# ping checks
if check_type == "ping":
if check_type == CheckType.PING:
if (
not _v.ipv4(val["ip"])
and not _v.ipv6(val["ip"])
@@ -103,12 +105,8 @@ class CheckSerializer(serializers.ModelSerializer):
"Please enter a valid IP address or domain name"
)
if check_type == "cpuload" and not self.instance:
if (
Check.objects.filter(**filter, check_type="cpuload")
.exclude(managed_by_policy=True)
.exists()
):
if check_type == CheckType.CPU_LOAD and not self.instance:
if Check.objects.filter(**filter, check_type=CheckType.CPU_LOAD).exists():
raise serializers.ValidationError(
"A cpuload check for this agent already exists"
)
@@ -127,12 +125,8 @@ class CheckSerializer(serializers.ModelSerializer):
f"Warning threshold must be less than Error Threshold"
)
if check_type == "memory" and not self.instance:
if (
Check.objects.filter(**filter, check_type="memory")
.exclude(managed_by_policy=True)
.exists()
):
if check_type == CheckType.MEMORY and not self.instance:
if Check.objects.filter(**filter, check_type=CheckType.MEMORY).exists():
raise serializers.ValidationError(
"A memory check for this agent already exists"
)
@@ -166,59 +160,34 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer):
script_args = serializers.SerializerMethodField()
def get_script_args(self, obj):
if obj.check_type != "script":
if obj.check_type != CheckType.SCRIPT:
return []
agent = self.context["agent"] if "agent" in self.context.keys() else obj.agent
return Script.parse_script_args(
agent=obj.agent, shell=obj.script.shell, args=obj.script_args
agent=agent, shell=obj.script.shell, args=obj.script_args
)
class Meta:
model = Check
exclude = [
"policy",
"managed_by_policy",
"overriden_by_policy",
"parent_check",
"overridden_by_policy",
"name",
"more_info",
"last_run",
"email_alert",
"text_alert",
"fails_b4_alert",
"fail_count",
"outage_history",
"extra_details",
"stdout",
"stderr",
"retcode",
"execution_time",
"svc_display_name",
"svc_policy_mode",
"created_by",
"created_time",
"modified_by",
"modified_time",
"history",
"dashboard_alert",
]
class CheckResultsSerializer(serializers.ModelSerializer):
# used when patching results from the windows agent
# no validation needed
class Meta:
model = Check
fields = "__all__"
class CheckHistorySerializer(serializers.ModelSerializer):
x = serializers.SerializerMethodField()
def get_x(self, obj):
return obj.x.astimezone(pytz.timezone(self.context["timezone"])).isoformat()
# used to return large amounts of graph data
class Meta:
model = CheckHistory

View File

@@ -1,23 +1,31 @@
import datetime as dt
import random
from time import sleep
from typing import Union
from typing import Optional
from django.utils import timezone as djangotime
from alerts.models import Alert
from checks.models import CheckResult
from tacticalrmm.celery import app
@app.task
def handle_check_email_alert_task(pk, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
def handle_check_email_alert_task(
pk: int, alert_interval: Optional[float] = None
) -> str:
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending email
if not alert.email_sent:
sleep(random.randint(1, 10))
alert.assigned_check.send_email()
check_result = CheckResult.objects.get(
assigned_check=alert.assigned_check, agent=alert.agent
)
sleep(random.randint(1, 5))
check_result.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
else:
@@ -25,8 +33,11 @@ def handle_check_email_alert_task(pk, alert_interval: Union[float, None] = None)
# send an email only if the last email sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.email_sent < delta:
sleep(random.randint(1, 10))
alert.assigned_check.send_email()
check_result = CheckResult.objects.get(
assigned_check=alert.assigned_check, agent=alert.agent
)
sleep(random.randint(1, 5))
check_result.send_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
@@ -34,15 +45,20 @@ def handle_check_email_alert_task(pk, alert_interval: Union[float, None] = None)
@app.task
def handle_check_sms_alert_task(pk, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert
def handle_check_sms_alert_task(pk: int, alert_interval: Optional[float] = None) -> str:
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending text
if not alert.sms_sent:
check_result = CheckResult.objects.get(
assigned_check=alert.assigned_check, agent=alert.agent
)
sleep(random.randint(1, 3))
alert.assigned_check.send_sms()
check_result.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
else:
@@ -50,8 +66,11 @@ def handle_check_sms_alert_task(pk, alert_interval: Union[float, None] = None) -
# send a text only if the last text sent is older than the alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.sms_sent < delta:
check_result = CheckResult.objects.get(
assigned_check=alert.assigned_check, agent=alert.agent
)
sleep(random.randint(1, 3))
alert.assigned_check.send_sms()
check_result.send_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
@@ -60,14 +79,19 @@ def handle_check_sms_alert_task(pk, alert_interval: Union[float, None] = None) -
@app.task
def handle_resolved_check_sms_alert_task(pk: int) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending text
if not alert.resolved_sms_sent:
check_result = CheckResult.objects.get(
assigned_check=alert.assigned_check, agent=alert.agent
)
sleep(random.randint(1, 3))
alert.assigned_check.send_resolved_sms()
check_result.send_resolved_sms()
alert.resolved_sms_sent = djangotime.now()
alert.save(update_fields=["resolved_sms_sent"])
@@ -76,14 +100,19 @@ def handle_resolved_check_sms_alert_task(pk: int) -> str:
@app.task
def handle_resolved_check_email_alert_task(pk: int) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
try:
alert = Alert.objects.get(pk=pk)
except Alert.DoesNotExist:
return "alert not found"
# first time sending email
if not alert.resolved_email_sent:
sleep(random.randint(1, 10))
alert.assigned_check.send_resolved_email()
check_result = CheckResult.objects.get(
assigned_check=alert.assigned_check, agent=alert.agent
)
sleep(random.randint(1, 5))
check_result.send_resolved_email()
alert.resolved_email_sent = djangotime.now()
alert.save(update_fields=["resolved_email_sent"])

View File

@@ -1,11 +1,11 @@
from unittest.mock import patch
from checks.models import CheckHistory
from django.conf import settings
from django.test import modify_settings
from django.utils import timezone as djangotime
from model_bakery import baker
from checks.models import CheckHistory, CheckResult
from tacticalrmm.constants import CheckStatus, CheckType, EvtLogFailWhen, EvtLogTypes
from tacticalrmm.test import TacticalTestCase
from .serializers import CheckSerializer
@@ -13,11 +13,6 @@ from .serializers import CheckSerializer
base_url = "/checks"
@modify_settings(
MIDDLEWARE={
"remove": "tacticalrmm.middleware.LinuxMiddleware",
}
)
class TestCheckViews(TacticalTestCase):
def setUp(self):
self.authenticate()
@@ -31,13 +26,13 @@ class TestCheckViews(TacticalTestCase):
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 8) # type: ignore
self.assertEqual(len(resp.data), 8)
# test checks agent url
url = f"/agents/{agent.agent_id}/checks/"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 4) # type: ignore
self.assertEqual(len(resp.data), 4)
# test agent doesn't exist
url = f"/agents/jh3498uf8fkh4ro8hfd8df98/checks/"
@@ -72,7 +67,7 @@ class TestCheckViews(TacticalTestCase):
serializer = CheckSerializer(disk_check)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, serializer.data) # type: ignore
self.assertEqual(resp.data, serializer.data)
self.check_not_authenticated("get", url)
def test_add_disk_check(self):
@@ -84,7 +79,7 @@ class TestCheckViews(TacticalTestCase):
agent_payload = {
"agent": agent.agent_id,
"check_type": "diskspace",
"check_type": CheckType.DISK_SPACE,
"disk": "C:",
"error_threshold": 55,
"warning_threshold": 0,
@@ -93,7 +88,7 @@ class TestCheckViews(TacticalTestCase):
policy_payload = {
"policy": policy.id,
"check_type": "diskspace",
"check_type": CheckType.DISK_SPACE,
"disk": "C:",
"error_threshold": 55,
"warning_threshold": 0,
@@ -133,7 +128,7 @@ class TestCheckViews(TacticalTestCase):
agent_payload = {
"agent": agent.agent_id,
"check_type": "cpuload",
"check_type": CheckType.CPU_LOAD,
"error_threshold": 66,
"warning_threshold": 0,
"fails_b4_alert": 9,
@@ -141,7 +136,7 @@ class TestCheckViews(TacticalTestCase):
policy_payload = {
"policy": policy.id,
"check_type": "cpuload",
"check_type": CheckType.CPU_LOAD,
"error_threshold": 66,
"warning_threshold": 0,
"fails_b4_alert": 9,
@@ -180,7 +175,7 @@ class TestCheckViews(TacticalTestCase):
agent_payload = {
"agent": agent.agent_id,
"check_type": "memory",
"check_type": CheckType.MEMORY,
"error_threshold": 78,
"warning_threshold": 0,
"fails_b4_alert": 1,
@@ -188,7 +183,7 @@ class TestCheckViews(TacticalTestCase):
policy_payload = {
"policy": policy.id,
"check_type": "memory",
"check_type": CheckType.MEMORY,
"error_threshold": 78,
"warning_threshold": 0,
"fails_b4_alert": 1,
@@ -254,35 +249,44 @@ class TestCheckViews(TacticalTestCase):
# setup data
agent = baker.make_recipe("agents.agent")
check = baker.make_recipe("checks.diskspace_check", agent=agent)
baker.make("checks.CheckHistory", check_id=check.id, _quantity=30)
check_result = baker.make(
"checks.CheckResult", assigned_check=check, agent=agent
)
baker.make(
"checks.CheckHistory",
check_id=check.id,
agent_id=agent.agent_id,
_quantity=30,
)
check_history_data = baker.make(
"checks.CheckHistory",
check_id=check.id,
agent_id=agent.agent_id,
_quantity=30,
)
# need to manually set the date back 35 days
for check_history in check_history_data: # type: ignore
check_history.x = djangotime.now() - djangotime.timedelta(days=35) # type: ignore
for check_history in check_history_data:
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
check_history.save()
# test invalid check pk
resp = self.client.patch("/checks/history/500/", format="json")
resp = self.client.patch("/checks/500/history/", format="json")
self.assertEqual(resp.status_code, 404)
url = f"/checks/{check.id}/history/"
url = f"/checks/{check_result.id}/history/"
# test with timeFilter last 30 days
data = {"timeFilter": 30}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 30) # type: ignore
self.assertEqual(len(resp.data), 30)
# test with timeFilter equal to 0
data = {"timeFilter": 0}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 60) # type: ignore
self.assertEqual(len(resp.data), 60)
self.check_not_authenticated("patch", url)
@@ -306,8 +310,8 @@ class TestCheckTasks(TacticalTestCase):
)
# need to manually set the date back 35 days
for check_history in check_history_data: # type: ignore
check_history.x = djangotime.now() - djangotime.timedelta(days=35) # type: ignore
for check_history in check_history_data:
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
check_history.save()
# prune data 30 days old
@@ -319,15 +323,14 @@ class TestCheckTasks(TacticalTestCase):
self.assertEqual(CheckHistory.objects.count(), 0)
def test_handle_script_check(self):
from checks.models import Check
url = "/api/v3/checkrunner/"
script = baker.make_recipe("checks.script_check", agent=self.agent)
check = baker.make_recipe("checks.script_check", agent=self.agent)
# test failing
data = {
"id": script.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"retcode": 500,
"stderr": "error",
"stdout": "message",
@@ -337,14 +340,15 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=script.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test passing
data = {
"id": script.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"retcode": 0,
"stderr": "error",
"stdout": "message",
@@ -354,16 +358,17 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=script.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "passing")
self.assertEqual(check_result.status, CheckStatus.PASSING)
# test failing info
script.info_return_codes = [20, 30, 50]
script.save()
check.info_return_codes = [20, 30, 50]
check.save()
data = {
"id": script.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"retcode": 30,
"stderr": "error",
"stdout": "message",
@@ -373,17 +378,18 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=script.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "info")
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "info")
# test failing warning
script.warning_return_codes = [80, 100, 1040]
script.save()
check.warning_return_codes = [80, 100, 1040]
check.save()
data = {
"id": script.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"retcode": 1040,
"stderr": "error",
"stdout": "message",
@@ -393,17 +399,15 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=script.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "warning")
def test_handle_diskspace_check(self):
from checks.models import Check
url = "/api/v3/checkrunner/"
diskspace = baker.make_recipe(
check = baker.make_recipe(
"checks.diskspace_check",
warning_threshold=20,
error_threshold=10,
@@ -412,7 +416,8 @@ class TestCheckTasks(TacticalTestCase):
# test warning threshold failure
data = {
"id": diskspace.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"exists": True,
"percent_used": 85,
"total": 500,
@@ -423,14 +428,15 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=diskspace.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "warning")
# test error failure
data = {
"id": diskspace.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"exists": True,
"percent_used": 95,
"total": 500,
@@ -441,27 +447,29 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=diskspace.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test disk not exist
data = {"id": diskspace.id, "exists": False}
data = {"id": check.id, "agent_id": self.agent.agent_id, "exists": False}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=diskspace.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test warning threshold 0
diskspace.warning_threshold = 0
diskspace.save()
check.warning_threshold = 0
check.save()
data = {
"id": diskspace.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"exists": True,
"percent_used": 95,
"total": 500,
@@ -472,16 +480,17 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=diskspace.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test error threshold 0
diskspace.warning_threshold = 50
diskspace.error_threshold = 0
diskspace.save()
check.warning_threshold = 50
check.error_threshold = 0
check.save()
data = {
"id": diskspace.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"exists": True,
"percent_used": 95,
"total": 500,
@@ -492,13 +501,14 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=diskspace.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "warning")
# test passing
data = {
"id": diskspace.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"exists": True,
"percent_used": 50,
"total": 500,
@@ -509,16 +519,14 @@ class TestCheckTasks(TacticalTestCase):
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=diskspace.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(new_check.status, "passing")
self.assertEqual(check_result.status, CheckStatus.PASSING)
def test_handle_cpuload_check(self):
from checks.models import Check
url = "/api/v3/checkrunner/"
cpuload = baker.make_recipe(
check = baker.make_recipe(
"checks.cpuload_check",
warning_threshold=70,
error_threshold=90,
@@ -526,81 +534,79 @@ class TestCheckTasks(TacticalTestCase):
)
# test failing warning
data = {"id": cpuload.id, "percent": 80}
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 80}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=cpuload.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "warning")
# test failing error
data = {"id": cpuload.id, "percent": 95}
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 95}
# reset check history
cpuload.history = []
cpuload.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=cpuload.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test passing
data = {"id": cpuload.id, "percent": 50}
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 50}
# reset check history
cpuload.history = []
cpuload.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=cpuload.id)
self.assertEqual(new_check.status, "passing")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.PASSING)
# test warning threshold 0
cpuload.warning_threshold = 0
cpuload.save()
data = {"id": cpuload.id, "percent": 95}
check.warning_threshold = 0
check.save()
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 95}
# reset check history
cpuload.history = []
cpuload.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=cpuload.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test error threshold 0
cpuload.warning_threshold = 50
cpuload.error_threshold = 0
cpuload.save()
data = {"id": cpuload.id, "percent": 95}
check.warning_threshold = 50
check.error_threshold = 0
check.save()
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 95}
# reset check history
cpuload.history = []
cpuload.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=cpuload.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "warning")
def test_handle_memory_check(self):
from checks.models import Check
url = "/api/v3/checkrunner/"
memory = baker.make_recipe(
check = baker.make_recipe(
"checks.memory_check",
warning_threshold=70,
error_threshold=90,
@@ -608,178 +614,193 @@ class TestCheckTasks(TacticalTestCase):
)
# test failing warning
data = {"id": memory.id, "percent": 80}
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 80}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=memory.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "warning")
# test failing error
data = {"id": memory.id, "percent": 95}
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 95}
# reset check history
memory.history = []
memory.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=memory.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test passing
data = {"id": memory.id, "percent": 50}
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 50}
# reset check history
memory.history = []
memory.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=memory.id)
self.assertEqual(new_check.status, "passing")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.PASSING)
# test warning threshold 0
memory.warning_threshold = 0
memory.save()
data = {"id": memory.id, "percent": 95}
check.warning_threshold = 0
check.save()
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 95}
# reset check history
memory.history = []
memory.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=memory.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "error")
# test error threshold 0
memory.warning_threshold = 50
memory.error_threshold = 0
memory.save()
data = {"id": memory.id, "percent": 95}
check.warning_threshold = 50
check.error_threshold = 0
check.save()
data = {"id": check.id, "agent_id": self.agent.agent_id, "percent": 95}
# reset check history
memory.history = []
memory.save()
check_result.history = []
check_result.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=memory.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check_result.alert_severity, "warning")
def test_handle_ping_check(self):
from checks.models import Check
url = "/api/v3/checkrunner/"
ping = baker.make_recipe(
check = baker.make_recipe(
"checks.ping_check", agent=self.agent, alert_severity="info"
)
# test failing info
data = {"id": ping.id, "status": "failing", "output": "reply from a.com"}
data = {
"id": check.id,
"agent_id": self.agent.agent_id,
"status": CheckStatus.FAILING,
"output": "reply from a.com",
}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=ping.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "info")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check.alert_severity, "info")
# test failing warning
ping.alert_severity = "warning"
ping.save()
check.alert_severity = "warning"
check.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=ping.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "warning")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check.alert_severity, "warning")
# test failing error
ping.alert_severity = "error"
ping.save()
check.alert_severity = "error"
check.save()
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=ping.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check.alert_severity, "error")
# test failing error
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=ping.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "error")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check.alert_severity, "error")
# test passing
data = {"id": ping.id, "status": "passing", "output": "reply from a.com"}
data = {
"id": check.id,
"agent_id": self.agent.agent_id,
"status": CheckStatus.PASSING,
"output": "reply from a.com",
}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=ping.id)
self.assertEqual(new_check.status, "passing")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.PASSING)
@patch("agents.models.Agent.nats_cmd")
def test_handle_winsvc_check(self, nats_cmd):
from checks.models import Check
url = "/api/v3/checkrunner/"
winsvc = baker.make_recipe(
check = baker.make_recipe(
"checks.winsvc_check", agent=self.agent, alert_severity="info"
)
# test passing running
data = {"id": winsvc.id, "status": "passing", "more_info": "ok"}
data = {
"id": check.id,
"agent_id": self.agent.agent_id,
"status": CheckStatus.PASSING,
"more_info": "ok",
}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=winsvc.id)
self.assertEqual(new_check.status, "passing")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.PASSING)
# test failing
data = {"id": winsvc.id, "status": "failing", "more_info": "ok"}
data = {
"id": check.id,
"agent_id": self.agent.agent_id,
"status": CheckStatus.FAILING,
"more_info": "ok",
}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=winsvc.id)
self.assertEqual(new_check.status, "failing")
self.assertEqual(new_check.alert_severity, "info")
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check.alert_severity, "info")
def test_handle_eventlog_check(self):
from checks.models import Check
url = "/api/v3/checkrunner/"
eventlog = baker.make_recipe(
check = baker.make_recipe(
"checks.eventlog_check",
event_type="warning",
fail_when="contains",
event_type=EvtLogTypes.WARNING,
fail_when=EvtLogFailWhen.CONTAINS,
event_id=123,
alert_severity="warning",
agent=self.agent,
)
data = {
"id": eventlog.id,
"id": check.id,
"agent_id": self.agent.agent_id,
"log": [
{
"eventType": "warning",
@@ -808,51 +829,51 @@ class TestCheckTasks(TacticalTestCase):
],
}
no_logs_data = {"id": eventlog.id, "log": []}
no_logs_data = {"id": check.id, "agent_id": self.agent.agent_id, "log": []}
# test failing when contains
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=eventlog.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEquals(new_check.alert_severity, "warning")
self.assertEquals(new_check.status, "failing")
self.assertEqual(check.alert_severity, "warning")
self.assertEqual(check_result.status, CheckStatus.FAILING)
# test passing when contains
resp = self.client.patch(url, no_logs_data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=eventlog.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEquals(new_check.status, "passing")
self.assertEqual(check_result.status, CheckStatus.PASSING)
# test failing when not contains and message and source
eventlog.fail_when = "not_contains"
eventlog.alert_severity = "error"
eventlog.save()
check.fail_when = EvtLogFailWhen.NOT_CONTAINS
check.alert_severity = "error"
check.save()
resp = self.client.patch(url, no_logs_data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=eventlog.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEquals(new_check.status, "failing")
self.assertEquals(new_check.alert_severity, "error")
self.assertEqual(check_result.status, CheckStatus.FAILING)
self.assertEqual(check.alert_severity, "error")
# test passing when contains with source and message
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
new_check = Check.objects.get(pk=eventlog.id)
check_result = CheckResult.objects.get(assigned_check=check, agent=self.agent)
self.assertEquals(new_check.status, "passing")
self.assertEqual(check_result.status, CheckStatus.PASSING)
class TestCheckPermissions(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
self.client_setup()
self.setup_client()
def test_get_checks_permissions(self):
agent = baker.make_recipe("agents.agent")
@@ -876,7 +897,7 @@ class TestCheckPermissions(TacticalTestCase):
)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
self.check_not_authorized("get", f"{base_url}/")
self.check_not_authorized("get", f"/agents/{agent.agent_id}/checks/")
@@ -890,15 +911,15 @@ class TestCheckPermissions(TacticalTestCase):
user.role.save()
r = self.check_authorized("get", f"{base_url}/")
self.assertEqual(len(r.data), 14) # type: ignore
self.assertEqual(len(r.data), 14)
r = self.check_authorized("get", f"/agents/{agent.agent_id}/checks/")
self.assertEqual(len(r.data), 5) # type: ignore
self.assertEqual(len(r.data), 5)
r = self.check_authorized(
"get", f"/agents/{unauthorized_agent.agent_id}/checks/"
)
self.assertEqual(len(r.data), 7) # type: ignore
self.assertEqual(len(r.data), 7)
r = self.check_authorized("get", f"/automation/policies/{policy.id}/checks/")
self.assertEqual(len(r.data), 2) # type: ignore
self.assertEqual(len(r.data), 2)
# test limiting to client
user.role.can_view_clients.set([agent.client])
@@ -910,7 +931,7 @@ class TestCheckPermissions(TacticalTestCase):
# make sure queryset is limited too
r = self.client.get(f"{base_url}/")
self.assertEqual(len(r.data), 7) # type: ignore
self.assertEqual(len(r.data), 7)
def test_add_check_permissions(self):
agent = baker.make_recipe("agents.agent")
@@ -919,7 +940,7 @@ class TestCheckPermissions(TacticalTestCase):
policy_data = {
"policy": policy.id,
"check_type": "diskspace",
"check_type": CheckType.DISK_SPACE,
"disk": "C:",
"error_threshold": 55,
"warning_threshold": 0,
@@ -928,7 +949,7 @@ class TestCheckPermissions(TacticalTestCase):
agent_data = {
"agent": agent.agent_id,
"check_type": "diskspace",
"check_type": CheckType.DISK_SPACE,
"disk": "C:",
"error_threshold": 55,
"warning_threshold": 0,
@@ -937,7 +958,7 @@ class TestCheckPermissions(TacticalTestCase):
unauthorized_agent_data = {
"agent": unauthorized_agent.agent_id,
"check_type": "diskspace",
"check_type": CheckType.DISK_SPACE,
"disk": "C:",
"error_threshold": 55,
"warning_threshold": 0,
@@ -951,7 +972,7 @@ class TestCheckPermissions(TacticalTestCase):
self.check_authorized_superuser("post", url, data)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized("post", url, data)
@@ -992,7 +1013,7 @@ class TestCheckPermissions(TacticalTestCase):
self.check_authorized_superuser(method, policy_url)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized(method, url)
@@ -1024,12 +1045,22 @@ class TestCheckPermissions(TacticalTestCase):
agent = baker.make_recipe("agents.agent")
unauthorized_agent = baker.make_recipe("agents.agent")
check = baker.make("checks.Check", agent=agent)
check_result = baker.make(
"checks.CheckResult", agent=agent, assigned_check=check
)
unauthorized_check = baker.make("checks.Check", agent=unauthorized_agent)
unauthorized_check_result = baker.make(
"checks.CheckResult",
agent=unauthorized_agent,
assigned_check=unauthorized_check,
)
for action in ["reset", "run"]:
if action == "reset":
url = f"{base_url}/{check.id}/{action}/"
unauthorized_url = f"{base_url}/{unauthorized_check.id}/{action}/"
url = f"{base_url}/{check_result.id}/{action}/"
unauthorized_url = (
f"{base_url}/{unauthorized_check_result.id}/{action}/"
)
else:
url = f"{base_url}/{agent.agent_id}/{action}/"
unauthorized_url = f"{base_url}/{unauthorized_agent.agent_id}/{action}/"
@@ -1039,7 +1070,7 @@ class TestCheckPermissions(TacticalTestCase):
self.check_authorized_superuser("post", unauthorized_url)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized("post", url)
@@ -1066,17 +1097,25 @@ class TestCheckPermissions(TacticalTestCase):
agent = baker.make_recipe("agents.agent")
unauthorized_agent = baker.make_recipe("agents.agent")
check = baker.make("checks.Check", agent=agent)
check_result = baker.make(
"checks.CheckResult", agent=agent, assigned_check=check
)
unauthorized_check = baker.make("checks.Check", agent=unauthorized_agent)
unauthorized_check_result = baker.make(
"checks.CheckResult",
agent=unauthorized_agent,
assigned_check=unauthorized_check,
)
url = f"{base_url}/{check.id}/history/"
unauthorized_url = f"{base_url}/{unauthorized_check.id}/history/"
url = f"{base_url}/{check_result.id}/history/"
unauthorized_url = f"{base_url}/{unauthorized_check_result.id}/history/"
# test superuser access
self.check_authorized_superuser("patch", url)
self.check_authorized_superuser("patch", unauthorized_url)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized("patch", url)
@@ -1098,12 +1137,3 @@ class TestCheckPermissions(TacticalTestCase):
self.check_authorized("patch", url)
self.check_not_authorized("patch", unauthorized_url)
def test_policy_fields_to_copy_exists(self):
from .models import Check
fields = [i.name for i in Check._meta.get_fields()]
check = baker.make("checks.Check")
for i in check.policy_fields_to_copy: # type: ignore
self.assertIn(i, fields)

View File

@@ -1,4 +1,4 @@
def bytes2human(n):
def bytes2human(n: int) -> str:
# http://code.activestate.com/recipes/578019
symbols = ("K", "M", "G", "T", "P", "E", "Z", "Y")
prefix = {}
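Only the opening lines of this helper are touched by the diff; for context, here is a sketch of the usual shape of the activestate recipe it links (the project's actual body may differ slightly):

def bytes2human(n: int) -> str:
    # http://code.activestate.com/recipes/578019
    symbols = ("K", "M", "G", "T", "P", "E", "Z", "Y")
    prefix = {}
    for i, s in enumerate(symbols):
        # K = 2**10, M = 2**20, ... built once per call
        prefix[s] = 1 << (i + 1) * 10
    for s in reversed(symbols):
        if n >= prefix[s]:
            return "%.1f%s" % (float(n) / prefix[s], s)
    return "%sB" % n

print(bytes2human(10_000_000))  # -> "9.5M"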

View File

@@ -1,8 +1,6 @@
import asyncio
from datetime import datetime as dt
from agents.models import Agent
from automation.models import Policy
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
@@ -12,10 +10,14 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from agents.models import Agent
from alerts.models import Alert
from automation.models import Policy
from tacticalrmm.constants import CheckStatus, CheckType
from tacticalrmm.helpers import notify_error
from tacticalrmm.permissions import _has_perm_on_agent
from tacticalrmm.utils import notify_error
from .models import Check, CheckHistory
from .models import Check, CheckHistory, CheckResult
from .permissions import ChecksPerms, RunChecksPerms
from .serializers import CheckHistorySerializer, CheckSerializer
@@ -26,16 +28,15 @@ class GetAddChecks(APIView):
def get(self, request, agent_id=None, policy=None):
if agent_id:
agent = get_object_or_404(Agent, agent_id=agent_id)
checks = Check.objects.filter(agent=agent)
checks = agent.get_checks_with_policies()
elif policy:
policy = get_object_or_404(Policy, id=policy)
checks = Check.objects.filter(policy=policy)
else:
checks = Check.objects.filter_by_role(request.user)
checks = Check.objects.filter_by_role(request.user) # type: ignore
return Response(CheckSerializer(checks, many=True).data)
def post(self, request):
from automation.tasks import generate_agent_checks_task
data = request.data.copy()
# Determine if adding check to Agent and replace agent_id with pk
@@ -48,34 +49,13 @@ class GetAddChecks(APIView):
# set event id to 0 if wildcard because it needs to be an integer field for db
# will be ignored anyway by the agent when doing wildcard check
if data["check_type"] == "eventlog" and data["event_id_is_wildcard"]:
if data["check_type"] == CheckType.EVENT_LOG and data["event_id_is_wildcard"]:
data["event_id"] = 0
serializer = CheckSerializer(data=data, partial=True)
serializer.is_valid(raise_exception=True)
new_check = serializer.save()
# Generate policy Checks
if "policy" in data.keys():
generate_agent_checks_task.delay(policy=data["policy"])
elif "agent" in data.keys():
checks = agent.agentchecks.filter( # type: ignore
check_type=new_check.check_type, managed_by_policy=True
)
# Should only be one
duplicate_check = [
check for check in checks if check.is_duplicate(new_check)
]
if duplicate_check:
policy = Check.objects.get(pk=duplicate_check[0].parent_check).policy
if policy.enforced:
new_check.overriden_by_policy = True
new_check.save()
else:
duplicate_check[0].delete()
return Response(f"{new_check.readable_desc} was added!")
@@ -90,8 +70,6 @@ class GetUpdateDeleteCheck(APIView):
return Response(CheckSerializer(check).data)
def put(self, request, pk):
from automation.tasks import update_policy_check_fields_task
check = get_object_or_404(Check, pk=pk)
data = request.data.copy()
@@ -104,7 +82,7 @@ class GetUpdateDeleteCheck(APIView):
# set event id to 0 if wildcard because it needs to be an integer field for db
# will be ignored anyway by the agent when doing wildcard check
if check.check_type == "eventlog":
if check.check_type == CheckType.EVENT_LOG:
try:
data["event_id_is_wildcard"]
except KeyError:
@@ -117,14 +95,9 @@ class GetUpdateDeleteCheck(APIView):
serializer.is_valid(raise_exception=True)
check = serializer.save()
if check.policy:
update_policy_check_fields_task.delay(check=check.pk)
return Response(f"{check.readable_desc} was edited!")
def delete(self, request, pk):
from automation.tasks import generate_agent_checks_task
check = get_object_or_404(Check, pk=pk)
if check.agent and not _has_perm_on_agent(request.user, check.agent.agent_id):
@@ -132,18 +105,6 @@ class GetUpdateDeleteCheck(APIView):
check.delete()
# Policy check deleted
if check.policy:
Check.objects.filter(managed_by_policy=True, parent_check=pk).delete()
# Re-evaluate agent checks is policy was enforced
if check.policy.enforced:
generate_agent_checks_task.delay(policy=check.policy.pk)
# Agent check deleted
elif check.agent:
generate_agent_checks_task.delay(agents=[check.agent.pk])
return Response(f"{check.readable_desc} was deleted!")
@@ -151,17 +112,20 @@ class ResetCheck(APIView):
permission_classes = [IsAuthenticated, ChecksPerms]
def post(self, request, pk):
check = get_object_or_404(Check, pk=pk)
result = get_object_or_404(CheckResult, pk=pk)
if check.agent and not _has_perm_on_agent(request.user, check.agent.agent_id):
if result.agent and not _has_perm_on_agent(request.user, result.agent.agent_id):
raise PermissionDenied()
check.status = "passing"
check.save()
result.status = CheckStatus.PASSING
result.save()
# resolve any alerts that are open
if check.alert.filter(resolved=False).exists():
check.alert.get(resolved=False).resolve()
alert = Alert.create_or_return_check_alert(
result.assigned_check, agent=result.agent, skip_create=True
)
if alert:
alert.resolve()
return Response("The check status was reset")
@@ -170,9 +134,9 @@ class GetCheckHistory(APIView):
permission_classes = [IsAuthenticated, ChecksPerms]
def patch(self, request, pk):
check = get_object_or_404(Check, pk=pk)
result = get_object_or_404(CheckResult, pk=pk)
if check.agent and not _has_perm_on_agent(request.user, check.agent.agent_id):
if result.agent and not _has_perm_on_agent(request.user, result.agent.agent_id):
raise PermissionDenied()
timeFilter = Q()
@@ -185,14 +149,16 @@ class GetCheckHistory(APIView):
- djangotime.timedelta(days=request.data["timeFilter"]),
)
check_history = CheckHistory.objects.filter(check_id=pk).filter(timeFilter).order_by("-x") # type: ignore
return Response(
CheckHistorySerializer(
check_history, context={"timezone": check.agent.timezone}, many=True
).data
check_history = (
CheckHistory.objects.filter(
check_id=result.assigned_check.id, agent_id=result.agent.agent_id
)
.filter(timeFilter)
.order_by("-x")
)
return Response(CheckHistorySerializer(check_history, many=True).data)
@api_view(["POST"])
@permission_classes([IsAuthenticated, RunChecksPerms])
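The reworked history endpoint above resolves the pk to a CheckResult and then filters CheckHistory by the parent check and the agent, instead of by the check pk alone. The time window itself is only partially visible in the hunk, so the following is a hedged standalone sketch of the intent, matching the tests earlier in this diff where "timeFilter": 0 returns all 60 rows and "timeFilter": 30 returns 30 (djangotime is django.utils.timezone, as aliased in the view):

from django.db.models import Q
from django.utils import timezone as djangotime

def history_time_filter(days: int) -> Q:
    # An empty Q() matches every row (the timeFilter == 0 case);
    # otherwise keep only rows newer than now minus `days`.
    if not days:
        return Q()
    return Q(x__gt=djangotime.now() - djangotime.timedelta(days=days))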

View File

@@ -1,8 +1,9 @@
# Generated by Django 3.2.10 on 2021-12-26 05:47
import clients.models
from django.db import migrations, models
import clients.models
class Migration(migrations.Migration):

View File

@@ -0,0 +1,21 @@
# Generated by Django 4.0.3 on 2022-04-15 01:53
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('clients', '0020_auto_20211226_0547'),
]
operations = [
migrations.RemoveField(
model_name='client',
name='agent_count',
),
migrations.RemoveField(
model_name='site',
name='agent_count',
),
]

View File

@@ -1,15 +1,17 @@
import uuid
from typing import Dict
from django.contrib.postgres.fields import ArrayField
from django.core.cache import cache
from django.db import models
from agents.models import Agent
from django.contrib.postgres.fields import ArrayField
from django.db import models
from logs.models import BaseAuditModel
from tacticalrmm.constants import AGENT_DEFER
from tacticalrmm.models import PermissionQuerySet
def _default_failing_checks_data():
def _default_failing_checks_data() -> Dict[str, bool]:
return {"error": False, "warning": False}
@@ -19,7 +21,6 @@ class Client(BaseAuditModel):
name = models.CharField(max_length=255, unique=True)
block_policy_inheritance = models.BooleanField(default=False)
failing_checks = models.JSONField(default=_default_failing_checks_data)
agent_count = models.PositiveIntegerField(default=0)
workstation_policy = models.ForeignKey(
"automation.Policy",
related_name="workstation_clients",
@@ -45,7 +46,6 @@ class Client(BaseAuditModel):
def save(self, *args, **kwargs):
from alerts.tasks import cache_agents_alert_template
from automation.tasks import generate_agent_checks_task
# get old client if exists
old_client = Client.objects.get(pk=self.pk) if self.pk else None
@@ -56,21 +56,25 @@ class Client(BaseAuditModel):
)
# check if policies have changed and initiate task to reapply policies if so
if old_client:
if (
(old_client.server_policy != self.server_policy)
or (old_client.workstation_policy != self.workstation_policy)
or (
old_client.block_policy_inheritance != self.block_policy_inheritance
)
):
generate_agent_checks_task.delay(
client=self.pk,
create_tasks=True,
)
if old_client and (
old_client.alert_template != self.alert_template
or old_client.workstation_policy != self.workstation_policy
or old_client.server_policy != self.server_policy
):
cache_agents_alert_template.delay()
if old_client.alert_template != self.alert_template:
cache_agents_alert_template.delay()
if old_client and (
old_client.workstation_policy != self.workstation_policy
or old_client.server_policy != self.server_policy
):
sites = self.sites.all()
if old_client.workstation_policy != self.workstation_policy:
for site in sites:
cache.delete_many_pattern(f"site_workstation_{site.pk}_*")
if old_client.server_policy != self.server_policy:
for site in sites:
cache.delete_many_pattern(f"site_server_{site.pk}_*")
class Meta:
ordering = ("name",)
@@ -78,15 +82,6 @@ class Client(BaseAuditModel):
def __str__(self):
return self.name
@property
def has_maintenanace_mode_agents(self):
return (
Agent.objects.defer(*AGENT_DEFER)
.filter(site__client=self, maintenance_mode=True)
.count()
> 0
)
@property
def live_agent_count(self) -> int:
return Agent.objects.defer(*AGENT_DEFER).filter(site__client=self).count()
@@ -106,7 +101,6 @@ class Site(BaseAuditModel):
name = models.CharField(max_length=255)
block_policy_inheritance = models.BooleanField(default=False)
failing_checks = models.JSONField(default=_default_failing_checks_data)
agent_count = models.PositiveIntegerField(default=0)
workstation_policy = models.ForeignKey(
"automation.Policy",
related_name="workstation_sites",
@@ -132,7 +126,6 @@ class Site(BaseAuditModel):
def save(self, *args, **kwargs):
from alerts.tasks import cache_agents_alert_template
from automation.tasks import generate_agent_checks_task
# get old site if exists
old_site = Site.objects.get(pk=self.pk) if self.pk else None
@@ -145,15 +138,18 @@ class Site(BaseAuditModel):
# check if policies have changed and initiate task to reapply policies if so
if old_site:
if (
(old_site.server_policy != self.server_policy)
or (old_site.workstation_policy != self.workstation_policy)
or (old_site.block_policy_inheritance != self.block_policy_inheritance)
old_site.alert_template != self.alert_template
or old_site.workstation_policy != self.workstation_policy
or old_site.server_policy != self.server_policy
):
generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
if old_site.alert_template != self.alert_template:
cache_agents_alert_template.delay()
if old_site.workstation_policy != self.workstation_policy:
cache.delete_many_pattern(f"site_workstation_{self.pk}_*")
if old_site.server_policy != self.server_policy:
cache.delete_many_pattern(f"site_server_{self.pk}_*")
class Meta:
ordering = ("name",)
unique_together = (("client", "name"),)
@@ -161,10 +157,6 @@ class Site(BaseAuditModel):
def __str__(self):
return self.name
@property
def has_maintenanace_mode_agents(self):
return self.agents.defer(*AGENT_DEFER).filter(maintenance_mode=True).count() > 0 # type: ignore
@property
def live_agent_count(self) -> int:
return self.agents.defer(*AGENT_DEFER).count() # type: ignore

View File

@@ -4,7 +4,7 @@ from tacticalrmm.permissions import _has_perm, _has_perm_on_client, _has_perm_on
class ClientsPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
if "pk" in view.kwargs.keys():
return _has_perm(r, "can_list_clients") and _has_perm_on_client(
@@ -21,7 +21,7 @@ class ClientsPerms(permissions.BasePermission):
class SitesPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
if "pk" in view.kwargs.keys():
return _has_perm(r, "can_list_sites") and _has_perm_on_site(
@@ -38,7 +38,7 @@ class SitesPerms(permissions.BasePermission):
class DeploymentPerms(permissions.BasePermission):
def has_permission(self, r, view):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_deployments")
else:

View File

@@ -30,7 +30,8 @@ class SiteCustomFieldSerializer(ModelSerializer):
class SiteSerializer(ModelSerializer):
client_name = ReadOnlyField(source="client.name")
custom_fields = SiteCustomFieldSerializer(many=True, read_only=True)
maintenance_mode = ReadOnlyField(source="has_maintenanace_mode_agents")
maintenance_mode = ReadOnlyField()
agent_count = ReadOnlyField()
class Meta:
model = Site
@@ -92,11 +93,12 @@ class ClientCustomFieldSerializer(ModelSerializer):
class ClientSerializer(ModelSerializer):
sites = SerializerMethodField()
custom_fields = ClientCustomFieldSerializer(many=True, read_only=True)
maintenance_mode = ReadOnlyField(source="has_maintenanace_mode_agents")
maintenance_mode = ReadOnlyField()
agent_count = ReadOnlyField()
def get_sites(self, obj):
return SiteSerializer(
obj.sites.select_related("client").filter_by_role(self.context["user"]),
obj.filtered_sites,
many=True,
).data
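Dropping source="has_maintenanace_mode_agents" works because a rest_framework ReadOnlyField with no explicit source falls back to the field's own name, so the serialized object must now expose maintenance_mode and agent_count attributes itself (for example via a model property or a queryset annotation). A small self-contained illustration of that DRF behaviour, using stand-in names that are not taken from the project:

from rest_framework import serializers

class SiteSketchSerializer(serializers.Serializer):
    name = serializers.CharField()
    maintenance_mode = serializers.ReadOnlyField()  # reads instance.maintenance_mode
    agent_count = serializers.ReadOnlyField()       # reads instance.agent_count

class SiteStub:
    # stand-in for a Site instance or an annotated queryset row
    name = "Main Office"
    maintenance_mode = False
    agent_count = 12

print(dict(SiteSketchSerializer(SiteStub()).data))
# {'name': 'Main Office', 'maintenance_mode': False, 'agent_count': 12}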

View File

@@ -97,7 +97,7 @@ class TestClientViews(TacticalTestCase):
payload = {
"client": {"name": "Custom Field Client"},
"site": {"name": "Setup Site"},
"custom_fields": [{"field": field.id, "string_value": "new Value"}], # type: ignore
"custom_fields": [{"field": field.id, "string_value": "new Value"}],
}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
@@ -113,7 +113,7 @@ class TestClientViews(TacticalTestCase):
# setup data
client = baker.make("clients.Client")
url = f"{base_url}/{client.id}/" # type: ignore
url = f"{base_url}/{client.id}/"
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)
@@ -129,7 +129,7 @@ class TestClientViews(TacticalTestCase):
# test successful edit client
data = {"client": {"name": "NewClientName"}, "custom_fields": []}
url = f"{base_url}/{client.id}/" # type: ignore
url = f"{base_url}/{client.id}/"
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(Client.objects.filter(name="NewClientName").exists())
@@ -144,10 +144,10 @@ class TestClientViews(TacticalTestCase):
field = baker.make("core.CustomField", model="client", type="checkbox")
payload = {
"client": {
"id": client.id, # type: ignore
"id": client.id,
"name": "Custom Field Client",
},
"custom_fields": [{"field": field.id, "bool_value": True}], # type: ignore
"custom_fields": [{"field": field.id, "bool_value": True}],
}
r = self.client.put(url, payload, format="json")
self.assertEqual(r.status_code, 200)
@@ -160,10 +160,10 @@ class TestClientViews(TacticalTestCase):
# edit custom field value
payload = {
"client": {
"id": client.id, # type: ignore
"id": client.id,
"name": "Custom Field Client",
},
"custom_fields": [{"field": field.id, "bool_value": False}], # type: ignore
"custom_fields": [{"field": field.id, "bool_value": False}],
}
r = self.client.put(url, payload, format="json")
self.assertEqual(r.status_code, 200)
@@ -187,14 +187,14 @@ class TestClientViews(TacticalTestCase):
r = self.client.delete(f"{base_url}/334/", format="json")
self.assertEqual(r.status_code, 404)
url = f"/clients/{client_to_delete.id}/?site_to_move={site_to_move.id}" # type: ignore
url = f"/clients/{client_to_delete.id}/?site_to_move={site_to_move.id}"
# test successful deletion
r = self.client.delete(url, format="json")
self.assertEqual(r.status_code, 200)
agent_moved = Agent.objects.get(pk=agent.pk)
self.assertEqual(agent_moved.site.id, site_to_move.id) # type: ignore
self.assertFalse(Client.objects.filter(pk=client_to_delete.id).exists()) # type: ignore
self.assertEqual(agent_moved.site.id, site_to_move.id)
self.assertFalse(Client.objects.filter(pk=client_to_delete.id).exists())
self.check_not_authenticated("delete", url)
@@ -207,7 +207,7 @@ class TestClientViews(TacticalTestCase):
r = self.client.get(url, format="json")
serializer = SiteSerializer(sites, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, serializer.data) # type: ignore
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
@@ -220,7 +220,7 @@ class TestClientViews(TacticalTestCase):
# test success add
payload = {
"site": {"client": client.id, "name": "LA Office"}, # type: ignore
"site": {"client": client.id, "name": "LA Office"},
"custom_fields": [],
}
r = self.client.post(url, payload, format="json")
@@ -228,7 +228,7 @@ class TestClientViews(TacticalTestCase):
# test with | symbol
payload = {
"site": {"client": client.id, "name": "LA Office |*&@#$"}, # type: ignore
"site": {"client": client.id, "name": "LA Office |*&@#$"},
"custom_fields": [],
}
serializer = SiteSerializer(data=payload["site"])
@@ -242,7 +242,7 @@ class TestClientViews(TacticalTestCase):
# test site already exists
payload = {
"site": {"client": site.client.id, "name": "LA Office"}, # type: ignore
"site": {"client": site.client.id, "name": "LA Office"},
"custom_fields": [],
}
serializer = SiteSerializer(data=payload["site"])
@@ -259,8 +259,8 @@ class TestClientViews(TacticalTestCase):
options=["one", "two", "three"],
)
payload = {
"site": {"client": client.id, "name": "Custom Field Site"}, # type: ignore
"custom_fields": [{"field": field.id, "string_value": "one"}], # type: ignore
"site": {"client": client.id, "name": "Custom Field Site"},
"custom_fields": [{"field": field.id, "string_value": "one"}],
}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
@@ -274,11 +274,11 @@ class TestClientViews(TacticalTestCase):
# setup data
site = baker.make("clients.Site")
url = f"{base_url}/sites/{site.id}/" # type: ignore
url = f"{base_url}/sites/{site.id}/"
r = self.client.get(url, format="json")
serializer = SiteSerializer(site)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, serializer.data) # type: ignore
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
@@ -292,11 +292,11 @@ class TestClientViews(TacticalTestCase):
self.assertEqual(r.status_code, 404)
data = {
"site": {"client": client.id, "name": "New Site Name"}, # type: ignore
"site": {"client": client.id, "name": "New Site Name"},
"custom_fields": [],
}
url = f"{base_url}/sites/{site.id}/" # type: ignore
url = f"{base_url}/sites/{site.id}/"
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(
@@ -312,11 +312,11 @@ class TestClientViews(TacticalTestCase):
)
payload = {
"site": {
"id": site.id, # type: ignore
"client": site.client.id, # type: ignore
"id": site.id,
"client": site.client.id,
"name": "Custom Field Site",
},
"custom_fields": [{"field": field.id, "multiple_value": ["two", "three"]}], # type: ignore
"custom_fields": [{"field": field.id, "multiple_value": ["two", "three"]}],
}
r = self.client.put(url, payload, format="json")
self.assertEqual(r.status_code, 200)
@@ -327,11 +327,11 @@ class TestClientViews(TacticalTestCase):
# edit custom field value
payload = {
"site": {
"id": site.id, # type: ignore
"client": client.id, # type: ignore
"id": site.id,
"client": client.id,
"name": "Custom Field Site",
},
"custom_fields": [{"field": field.id, "multiple_value": ["one"]}], # type: ignore
"custom_fields": [{"field": field.id, "multiple_value": ["one"]}],
}
r = self.client.put(url, payload, format="json")
self.assertEqual(r.status_code, 200)
@@ -356,7 +356,7 @@ class TestClientViews(TacticalTestCase):
r = self.client.delete("{base_url}/500/", format="json")
self.assertEqual(r.status_code, 404)
url = f"/clients/sites/{site_to_delete.id}/?move_to_site={site_to_move.id}" # type: ignore
url = f"/clients/sites/{site_to_delete.id}/?move_to_site={site_to_move.id}"
# test deleting with last site under client
r = self.client.delete(url, format="json")
@@ -364,12 +364,12 @@ class TestClientViews(TacticalTestCase):
self.assertEqual(r.json(), "A client must have at least 1 site.")
# test successful deletion
site_to_move.client = client # type: ignore
site_to_move.save(update_fields=["client"]) # type: ignore
site_to_move.client = client
site_to_move.save(update_fields=["client"])
r = self.client.delete(url, format="json")
self.assertEqual(r.status_code, 200)
agent_moved = Agent.objects.get(pk=agent.pk)
self.assertEqual(agent_moved.site.id, site_to_move.id) # type: ignore
self.assertEqual(agent_moved.site.id, site_to_move.id)
self.check_not_authenticated("delete", url)
@@ -381,7 +381,7 @@ class TestClientViews(TacticalTestCase):
r = self.client.get(url)
serializer = DeploymentSerializer(deployments, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, serializer.data) # type: ignore
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
@@ -391,9 +391,9 @@ class TestClientViews(TacticalTestCase):
url = f"{base_url}/deployments/"
payload = {
"client": site.client.id, # type: ignore
"site": site.id, # type: ignore
"expires": "2037-11-23 18:53",
"client": site.client.id,
"site": site.id,
"expires": "2037-11-23T18:53:04-04:00",
"power": 1,
"ping": 0,
"rdp": 1,
@@ -418,10 +418,10 @@ class TestClientViews(TacticalTestCase):
# setup data
deployment = baker.make("clients.Deployment")
url = f"{base_url}/deployments/{deployment.id}/" # type: ignore
url = f"{base_url}/deployments/{deployment.id}/"
r = self.client.delete(url)
self.assertEqual(r.status_code, 200)
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists()) # type: ignore
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists())
url = f"{base_url}/deployments/32348/"
r = self.client.delete(url)
@@ -435,7 +435,7 @@ class TestClientViews(TacticalTestCase):
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.assertEqual(r.data, "invalid") # type: ignore
self.assertEqual(r.data, "invalid")
uid = uuid.uuid4()
url = f"/clients/{uid}/deploy/"
@@ -456,13 +456,13 @@ class TestClientViews(TacticalTestCase):
class TestClientPermissions(TacticalTestCase):
def setUp(self):
self.client_setup()
self.setup_client()
self.setup_coresettings()
def test_get_clients_permissions(self):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
url = f"{base_url}/"
@@ -479,17 +479,17 @@ class TestClientPermissions(TacticalTestCase):
# all agents should be returned
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 5) # type: ignore
self.assertEqual(len(response.data), 5)
# limit user to specific client. only 1 client should be returned
user.role.can_view_clients.set([clients[3]])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 1) # type: ignore
self.assertEqual(len(response.data), 1)
# 2 should be returned now
user.role.can_view_clients.set([clients[0], clients[1]])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 2) # type: ignore
self.assertEqual(len(response.data), 2)
# limit to a specific site. The site shouldn't be in client returned sites
sites = baker.make("clients.Site", client=clients[4], _quantity=3)
@@ -498,8 +498,8 @@ class TestClientPermissions(TacticalTestCase):
user.role.can_view_sites.set([sites[0]])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 3) # type: ignore
for client in response.data: # type: ignore
self.assertEqual(len(response.data), 3)
for client in response.data:
if client["id"] == clients[0].id:
self.assertEqual(len(client["sites"]), 4)
elif client["id"] == clients[1].id:
@@ -522,7 +522,7 @@ class TestClientPermissions(TacticalTestCase):
self.check_authorized_superuser("post", url, data)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized("post", url, data)
@@ -537,7 +537,7 @@ class TestClientPermissions(TacticalTestCase):
def test_get_edit_delete_clients_permissions(self, delete):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
client = baker.make("clients.Client")
unauthorized_client = baker.make("clients.Client")
@@ -575,7 +575,7 @@ class TestClientPermissions(TacticalTestCase):
def test_get_sites_permissions(self):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
url = f"{base_url}/sites/"
@@ -593,28 +593,28 @@ class TestClientPermissions(TacticalTestCase):
# all sites should be returned
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 10) # type: ignore
self.assertEqual(len(response.data), 10)
# limit user to specific site. only 1 site should be returned
user.role.can_view_sites.set([sites[3]])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 1) # type: ignore
self.assertEqual(len(response.data), 1)
# 2 should be returned now
user.role.can_view_sites.set([sites[0], sites[1]])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 2) # type: ignore
self.assertEqual(len(response.data), 2)
# check if limiting user to client works
user.role.can_view_sites.clear()
user.role.can_view_clients.set([clients[0]])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 4) # type: ignore
self.assertEqual(len(response.data), 4)
# add a site to see if the results still work
user.role.can_view_sites.set([sites[1], sites[0]])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 5) # type: ignore
self.assertEqual(len(response.data), 5)
# make sure superusers work
self.check_authorized_superuser("get", url)
@@ -632,7 +632,7 @@ class TestClientPermissions(TacticalTestCase):
self.check_authorized_superuser("post", url, data)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized("post", url, data)
@@ -655,7 +655,7 @@ class TestClientPermissions(TacticalTestCase):
def test_get_edit_delete_sites_permissions(self, delete):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
site = baker.make("clients.Site")
unauthorized_site = baker.make("clients.Site")
@@ -714,7 +714,7 @@ class TestClientPermissions(TacticalTestCase):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# user with empty role should fail
self.check_not_authorized("get", url)
@@ -725,23 +725,23 @@ class TestClientPermissions(TacticalTestCase):
# all sites should be returned
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 12) # type: ignore
self.assertEqual(len(response.data), 12)
# limit user to specific site. only 1 site should be returned
user.role.can_view_sites.set([site])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 5) # type: ignore
self.assertEqual(len(response.data), 5)
# all should be returned now
user.role.can_view_clients.set([other_site.client])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 12) # type: ignore
self.assertEqual(len(response.data), 12)
# check if limiting user to client works
user.role.can_view_sites.clear()
user.role.can_view_clients.set([other_site.client])
response = self.check_authorized("get", url)
self.assertEqual(len(response.data), 7) # type: ignore
self.assertEqual(len(response.data), 7)
@patch("clients.models.Deployment.save")
def test_add_deployments_permissions(self, save):
@@ -762,7 +762,7 @@ class TestClientPermissions(TacticalTestCase):
self.check_authorized_superuser("post", url, data)
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# test user without role
self.check_not_authorized("post", url, data)
@@ -802,7 +802,7 @@ class TestClientPermissions(TacticalTestCase):
# create user with empty role
user = self.create_user_with_roles([])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
# make sure user with empty role is unauthorized
self.check_not_authorized("delete", url)
@@ -842,7 +842,7 @@ class TestClientPermissions(TacticalTestCase):
# when a user that is limited to a specific subset of clients creates a client. It should allow access to that client
client = baker.make("clients.Client")
user = self.create_user_with_roles(["can_manage_clients"])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
user.role.can_view_clients.set([client])
data = {"client": {"name": "New Client"}, "site": {"name": "New Site"}}
@@ -850,7 +850,7 @@ class TestClientPermissions(TacticalTestCase):
self.client.post(f"{base_url}/", data, format="json")
# make sure two clients are allowed now
self.assertEqual(User.objects.get(id=user.id).role.can_view_clients.count(), 2)
self.assertEqual(User.objects.get(id=user.pk).role.can_view_clients.count(), 2)
def test_restricted_user_creating_sites(self):
from accounts.models import User
@@ -858,7 +858,7 @@ class TestClientPermissions(TacticalTestCase):
# when a user that is limited to a specific subset of clients creates a client. It should allow access to that client
site = baker.make("clients.Site")
user = self.create_user_with_roles(["can_manage_sites"])
self.client.force_authenticate(user=user) # type: ignore
self.client.force_authenticate(user=user)
user.role.can_view_sites.set([site])
data = {"site": {"client": site.client.id, "name": "New Site"}}
@@ -866,4 +866,4 @@ class TestClientPermissions(TacticalTestCase):
self.client.post(f"{base_url}/sites/", data, format="json")
# make sure two sites are allowed now
self.assertEqual(User.objects.get(id=user.id).role.can_view_sites.count(), 2)
self.assertEqual(User.objects.get(id=user.pk).role.can_view_sites.count(), 2)
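
The test hunks above all follow the same pattern: bake fixture objects with model_bakery, authenticate a user created through the project's create_user_with_roles helper, and assert that role scoping (can_view_clients / can_view_sites) narrows what the list endpoints return. A condensed sketch of that flow, assuming the role has already been granted the relevant list permission by the time check_authorized is called and that the client list route is f"{base_url}/":

    from model_bakery import baker

    def test_role_scoping_sketch(self):
        # a user whose role grants nothing should be rejected outright
        user = self.create_user_with_roles([])
        self.client.force_authenticate(user=user)
        self.check_not_authorized("get", f"{base_url}/")

        # restricting can_view_clients to a single client should shrink
        # the list response to just that client
        clients = baker.make("clients.Client", _quantity=3)
        user.role.can_view_clients.set([clients[0]])
        response = self.check_authorized("get", f"{base_url}/")
        self.assertEqual(len(response.data), 1)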


@@ -2,18 +2,19 @@ import datetime as dt
import re
import uuid
import pytz
from agents.models import Agent
from core.models import CoreSettings
from django.db.models import Count, Exists, OuterRef, Prefetch, prefetch_related_objects
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from knox.models import AuthToken
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from agents.models import Agent
from core.utils import get_core_settings
from tacticalrmm.helpers import notify_error
from tacticalrmm.permissions import _has_perm_on_client, _has_perm_on_site
from tacticalrmm.utils import notify_error
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
from .permissions import ClientsPerms, DeploymentPerms, SitesPerms
@@ -30,12 +31,42 @@ class GetAddClients(APIView):
permission_classes = [IsAuthenticated, ClientsPerms]
def get(self, request):
clients = Client.objects.select_related(
"workstation_policy", "server_policy", "alert_template"
).filter_by_role(request.user)
return Response(
ClientSerializer(clients, context={"user": request.user}, many=True).data
clients = (
Client.objects.order_by("name")
.select_related("workstation_policy", "server_policy", "alert_template")
.filter_by_role(request.user) # type: ignore
.prefetch_related(
Prefetch(
"custom_fields",
queryset=ClientCustomField.objects.select_related("field"),
),
Prefetch(
"sites",
queryset=Site.objects.order_by("name")
.select_related("client")
.filter_by_role(request.user)
.prefetch_related("custom_fields__field")
.annotate(
maintenance_mode=Exists(
Agent.objects.filter(
site=OuterRef("pk"), maintenance_mode=True
)
)
)
.annotate(agent_count=Count("agents")),
to_attr="filtered_sites",
),
)
.annotate(
maintenance_mode=Exists(
Agent.objects.filter(
site__client=OuterRef("pk"), maintenance_mode=True
)
)
)
.annotate(agent_count=Count("sites__agents"))
)
return Response(ClientSerializer(clients, many=True).data)
def post(self, request):
# create client
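
The rewritten GetAddClients.get above is the heart of this changeset: rather than letting the serializer fire one query per client for sites, custom fields, agent counts and maintenance mode, everything is prefetched or annotated up front. Stripped of the role filtering and custom-field prefetches, the pattern looks roughly like this simplified sketch (not the exact production queryset):

    from django.db.models import Count, Exists, OuterRef, Prefetch

    from agents.models import Agent
    from clients.models import Client, Site

    clients = (
        Client.objects.order_by("name")
        .select_related("workstation_policy", "server_policy", "alert_template")
        .prefetch_related(
            Prefetch(
                "sites",
                queryset=Site.objects.order_by("name")
                .annotate(
                    maintenance_mode=Exists(
                        Agent.objects.filter(site=OuterRef("pk"), maintenance_mode=True)
                    )
                )
                .annotate(agent_count=Count("agents")),
                to_attr="filtered_sites",
            )
        )
        .annotate(
            maintenance_mode=Exists(
                Agent.objects.filter(site__client=OuterRef("pk"), maintenance_mode=True)
            )
        )
        .annotate(agent_count=Count("sites__agents"))
    )
    # each client now carries .agent_count, .maintenance_mode and a .filtered_sites
    # list with per-site annotations, all resolved in a handful of queries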
@@ -57,7 +88,7 @@ class GetAddClients(APIView):
site_serializer.is_valid(raise_exception=True)
if "initialsetup" in request.data.keys():
core = CoreSettings.objects.first()
core = get_core_settings()
core.default_time_zone = request.data["timezone"]
core.save(update_fields=["default_time_zone"])
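
The initialsetup branch now goes through core.utils.get_core_settings() instead of CoreSettings.objects.first(). The helper itself is not part of this diff; a hypothetical minimal version would simply centralize the singleton lookup, roughly:

    # hypothetical sketch only; the real helper lives in core/utils.py and may
    # add caching or other behaviour not shown in this diff
    from core.models import CoreSettings

    def get_core_settings():
        # CoreSettings is treated as a singleton row throughout the codebase
        return CoreSettings.objects.first()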
@@ -84,7 +115,25 @@ class GetUpdateDeleteClient(APIView):
def get(self, request, pk):
client = get_object_or_404(Client, pk=pk)
return Response(ClientSerializer(client, context={"user": request.user}).data)
prefetch_related_objects(
[client],
Prefetch(
"sites",
queryset=Site.objects.order_by("name")
.select_related("client")
.filter_by_role(request.user)
.prefetch_related("custom_fields__field")
.annotate(
maintenance_mode=Exists(
Agent.objects.filter(site=OuterRef("pk"), maintenance_mode=True)
)
)
.annotate(agent_count=Count("agents")),
to_attr="filtered_sites",
),
)
return Response(ClientSerializer(client).data)
def put(self, request, pk):
client = get_object_or_404(Client, pk=pk)
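
GetUpdateDeleteClient.get applies the same optimization to a single object: django.db.models.prefetch_related_objects attaches a filtered, annotated sites queryset to an already-fetched client under a custom attribute. In isolation, and leaving out the annotations, the mechanism is just:

    from django.db.models import Prefetch, prefetch_related_objects
    from django.shortcuts import get_object_or_404

    from clients.models import Client, Site

    client = get_object_or_404(Client, pk=pk)  # pk comes from the URL kwarg, as in the view
    prefetch_related_objects(
        [client],
        Prefetch(
            "sites",
            queryset=Site.objects.order_by("name"),
            to_attr="filtered_sites",
        ),
    )
    # client.filtered_sites is now a plain list, populated in one extra query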
@@ -119,8 +168,6 @@ class GetUpdateDeleteClient(APIView):
return Response("{client} was updated")
def delete(self, request, pk):
from automation.tasks import generate_agent_checks_task
client = get_object_or_404(Client, pk=pk)
agent_count = client.live_agent_count
@@ -129,7 +176,6 @@ class GetUpdateDeleteClient(APIView):
agents = Agent.objects.filter(site__client=client)
site = get_object_or_404(Site, pk=request.query_params["move_to_site"])
agents.update(site=site)
generate_agent_checks_task.delay(all=True, create_tasks=True)
elif agent_count > 0:
return notify_error(
@@ -144,7 +190,7 @@ class GetAddSites(APIView):
permission_classes = [IsAuthenticated, SitesPerms]
def get(self, request):
sites = Site.objects.filter_by_role(request.user)
sites = Site.objects.filter_by_role(request.user) # type: ignore
return Response(SiteSerializer(sites, many=True).data)
def post(self, request):
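
filter_by_role appears on the Client, Site and Deployment querysets throughout this diff, and the added # type: ignore comments hint that it lives on a custom manager the type checker cannot see. Its implementation is not shown here, but judging by the permission tests it restricts results to the user's role.can_view_clients / role.can_view_sites. A purely hypothetical illustration of that contract for sites:

    # hypothetical illustration only; the real method is defined on the
    # project's custom queryset classes and is not part of this diff
    from django.db import models

    class SiteQuerySet(models.QuerySet):
        def filter_by_role(self, user):
            role = getattr(user, "role", None)
            if getattr(user, "is_superuser", False) or role is None:
                return self
            if not role.can_view_sites.exists() and not role.can_view_clients.exists():
                return self
            return self.filter(
                models.Q(pk__in=role.can_view_sites.all())
                | models.Q(client__in=role.can_view_clients.all())
            )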
@@ -220,8 +266,6 @@ class GetUpdateDeleteSite(APIView):
return Response("Site was edited")
def delete(self, request, pk):
from automation.tasks import generate_agent_checks_task
site = get_object_or_404(Site, pk=pk)
if site.client.sites.count() == 1:
return notify_error("A client must have at least 1 site.")
@@ -232,7 +276,6 @@ class GetUpdateDeleteSite(APIView):
agents = Agent.objects.filter(site=site)
new_site = get_object_or_404(Site, pk=request.query_params["move_to_site"])
agents.update(site=new_site)
generate_agent_checks_task.delay(all=True, create_tasks=True)
elif agent_count > 0:
return notify_error(
@@ -247,12 +290,11 @@ class AgentDeployment(APIView):
permission_classes = [IsAuthenticated, DeploymentPerms]
def get(self, request):
deps = Deployment.objects.filter_by_role(request.user)
deps = Deployment.objects.filter_by_role(request.user) # type: ignore
return Response(DeploymentSerializer(deps, many=True).data)
def post(self, request):
from accounts.models import User
from knox.models import AuthToken
site = get_object_or_404(Site, pk=request.data["site"])
@@ -261,12 +303,17 @@ class AgentDeployment(APIView):
installer_user = User.objects.filter(is_installer_user=True).first()
expires = dt.datetime.strptime(
request.data["expires"], "%Y-%m-%d %H:%M"
).astimezone(pytz.timezone("UTC"))
now = djangotime.now()
delta = expires - now
obj, token = AuthToken.objects.create(user=installer_user, expiry=delta)
try:
expires = dt.datetime.strptime(
request.data["expires"], "%Y-%m-%dT%H:%M:%S%z"
)
except Exception:
return notify_error("expire date is invalid")
obj, token = AuthToken.objects.create(
user=installer_user, expiry=expires - djangotime.now()
)
flags = {
"power": request.data["power"],


@@ -33,7 +33,7 @@ meshSystemBin="${meshDir}/meshagent"
meshSvcName='meshagent.service'
meshSysD="/lib/systemd/system/${meshSvcName}"
deb=(ubuntu debian raspbian kali)
deb=(ubuntu debian raspbian kali linuxmint)
rhe=(fedora rocky centos rhel amzn arch opensuse)
set_locale_deb() {


@@ -1,9 +1,12 @@
import asyncio
from agents.models import Agent
from channels.db import database_sync_to_async
from channels.generic.websocket import AsyncJsonWebsocketConsumer
from django.contrib.auth.models import AnonymousUser
from django.db.models import F
from django.utils import timezone as djangotime
from agents.models import Agent
class DashInfo(AsyncJsonWebsocketConsumer):
@@ -33,43 +36,41 @@ class DashInfo(AsyncJsonWebsocketConsumer):
@database_sync_to_async
def get_dashboard_info(self):
server_offline_count = len(
[
agent
for agent in Agent.objects.filter(monitoring_type="server").only(
"pk",
"last_seen",
"overdue_time",
"offline_time",
)
if not agent.status == "online"
]
total_server_agents_count = (
Agent.objects.filter_by_role(self.user)
.filter(monitoring_type="server")
.count()
)
offline_server_agents_count = (
Agent.objects.filter_by_role(self.user)
.filter(monitoring_type="server")
.filter(
last_seen__lt=djangotime.now()
- (djangotime.timedelta(minutes=1) * F("offline_time"))
)
.count()
)
total_workstation_agents_count = (
Agent.objects.filter_by_role(self.user)
.filter(monitoring_type="workstation")
.count()
)
offline_workstation_agents_count = (
Agent.objects.filter_by_role(self.user)
.filter(monitoring_type="workstation")
.filter(
last_seen__lt=djangotime.now()
- (djangotime.timedelta(minutes=1) * F("offline_time"))
)
.count()
)
workstation_offline_count = len(
[
agent
for agent in Agent.objects.filter(monitoring_type="workstation").only(
"pk",
"last_seen",
"overdue_time",
"offline_time",
)
if not agent.status == "online"
]
)
ret = {
"total_server_offline_count": server_offline_count,
"total_workstation_offline_count": workstation_offline_count,
"total_server_count": Agent.objects.filter(
monitoring_type="server"
).count(),
"total_workstation_count": Agent.objects.filter(
monitoring_type="workstation"
).count(),
return {
"total_server_offline_count": offline_server_agents_count,
"total_workstation_offline_count": offline_workstation_agents_count,
"total_server_count": total_server_agents_count,
"total_workstation_count": total_workstation_agents_count,
}
return ret
async def send_dash_info(self):
while self.connected:

Some files were not shown because too many files have changed in this diff.