Compare commits

...

1086 Commits

Author SHA1 Message Date
wh1te909
dd8d39e698 Release 0.6.6 2021-04-30 07:05:04 +00:00
wh1te909
afb1316daa bump versions 2021-04-30 07:01:22 +00:00
wh1te909
04d7017536 rework ping checks #444 2021-04-30 06:32:21 +00:00
wh1te909
6a1c75b060 add help toolbar #452 2021-04-30 06:01:22 +00:00
Dan
5c94611f3b Merge pull request #456 from silversword411/develop
WIP it, WIP it good: and script library stuff
2021-04-29 18:08:07 -07:00
silversword411
4e5676e80f adding the wip 2021-04-29 11:45:32 -04:00
wh1te909
c96d688a9c add alert if new trmm version available #453 2021-04-29 08:12:44 +00:00
silversword411
804242e9a5 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-28 22:50:47 -04:00
silversword411
0ec9760b17 Adding to docker 2021-04-28 22:49:49 -04:00
Dan
d481ae3da4 Merge pull request #443 from bradhawkins85/patch-16
Update Win_ScreenConnectAIO.ps1
2021-04-28 09:04:43 -07:00
silversword411
4742c14fc1 Rename temp script 2021-04-28 11:12:18 -04:00
bradhawkins85
509b0d501b Update Win_ScreenConnectAIO.ps1
Updated script notes regarding quoting around variables.
2021-04-28 10:10:18 +10:00
silversword411
d4c9b04d4e Hidden Script Library todo list 2021-04-27 13:11:30 -04:00
silversword411
16fb4d331b script library adding msi install ref script 2021-04-27 13:07:14 -04:00
silversword411
e9e5bf31a7 script library adding file copy script 2021-04-27 12:50:01 -04:00
wh1te909
221418120e Release 0.6.5 2021-04-27 16:20:25 +00:00
wh1te909
46f852e26e bump version 2021-04-27 16:20:08 +00:00
sadnub
4234cf0a31 fix policy task deletion 2021-04-27 12:12:04 -04:00
wh1te909
7f3daea648 Release 0.6.4 2021-04-27 15:36:49 +00:00
wh1te909
2eb16c82f4 bump version 2021-04-27 15:36:38 +00:00
sadnub
e00b2ce591 add test for check deletes 2021-04-27 11:04:06 -04:00
sadnub
d71e1311ca fix deleting checks 2021-04-27 10:58:23 -04:00
sadnub
2cf16963e3 fix custom fields on policy tasks 2021-04-27 10:51:29 -04:00
wh1te909
10bf7b7fb4 update restore docs 2021-04-27 06:18:15 +00:00
wh1te909
182c85a228 Release 0.6.3 2021-04-27 06:02:33 +00:00
wh1te909
94b1988b90 don't make description a required field in edit agent model 2021-04-27 06:00:42 +00:00
wh1te909
6f7e62e9a0 remove alpha status 2021-04-27 05:39:52 +00:00
wh1te909
aa7076af04 bump versions 2021-04-27 05:05:57 +00:00
Dan
c928e8f0d4 Merge pull request #436 from silversword411/develop
Updating management commands
2021-04-26 21:04:08 -07:00
sadnub
5c6b106f68 adding docs for Custom Fields, Scripting, Collector Tasks, and KeyStore 2021-04-26 23:16:10 -04:00
sadnub
d45bcea1ff add mkdocs container to docker dev env 2021-04-26 23:16:10 -04:00
wh1te909
6ff2dc79f8 black 2021-04-27 02:31:33 +00:00
silversword411
b752329987 Adding standardized header comments and example 2021-04-26 20:59:39 -04:00
silversword411
f21465335a clarifying vscode instructions 2021-04-26 20:47:23 -04:00
silversword411
0801adfc4b community script consolidating Defender status reports script 2021-04-26 17:45:56 -04:00
silversword411
5bee8052d5 Fix client site 2021-04-25 23:05:03 -04:00
silversword411
68dca5dfef Updating management commands 2021-04-25 22:56:56 -04:00
Dan
3f51dd1d2f Merge pull request #435 from silversword411/develop
Docs and tips update
2021-04-25 00:20:05 -07:00
Dan
7f80889d77 Merge pull request #422 from sadnub/develop
Policy rework, global keystore, and collector tasks
2021-04-24 23:57:12 -07:00
sadnub
efc61c0222 fix tests 2021-04-24 22:13:02 -04:00
sadnub
6fc0a05d34 allow adding {{alert.property_name}} to resolved and failure alert scripts 2021-04-24 21:59:41 -04:00
sadnub
a9be872d7a make automated task tables sortable #431 2021-04-24 21:25:55 -04:00
sadnub
6ca85f099e fix autotask modals and allow editing the custom field for a collector task 2021-04-24 21:21:37 -04:00
sadnub
86ff677b8a fix styling 2021-04-24 21:06:17 -04:00
sadnub
35e295df86 implement keystore in script substitution with {{global.name}}. Also fixed issue with space in value. 2021-04-24 21:01:55 -04:00
sadnub
cd4d301790 keystore tests 2021-04-24 20:43:11 -04:00
sadnub
93bb329c3d add frontend end and backend for keystore 2021-04-24 20:36:21 -04:00
silversword411
7c1e0f2c30 More hidden dev docs 2021-04-24 20:06:30 -04:00
sadnub
b57f471f44 add ability to hide custom fields in UI if strictly for script usage 2021-04-24 20:01:28 -04:00
sadnub
252a9a2ed6 implement the rest of collector tasks and add tests 2021-04-24 17:40:44 -04:00
sadnub
7258d4d787 add block inheritance tests and fixes 2021-04-24 15:59:04 -04:00
sadnub
75522fa295 implement policy inheritance blocking 2021-04-24 10:07:37 -04:00
sadnub
4ba8f41d95 fixing tests 2021-04-24 10:07:37 -04:00
sadnub
f326f8e4de policy task and check rework. Added basic collector task implementation 2021-04-24 10:07:37 -04:00
sadnub
f863dc058e add UI for blocking policy inheritance on client, site, and agent 2021-04-24 10:07:37 -04:00
silversword411
20891db251 Tooltip on run interval 2021-04-24 08:52:55 -04:00
silversword411
f1d05f1342 Adding extra optional command line args to dialog 2021-04-23 16:08:16 -04:00
wh1te909
8dd636b0eb Release 0.6.2 2021-04-23 06:40:31 +00:00
wh1te909
6b5bda8ee1 bump versions 2021-04-23 06:12:19 +00:00
Dan
ddc5597157 Merge pull request #421 from silversword411/develop
script library and docs tweaks
2021-04-22 19:08:33 -07:00
silversword411
ae112c7257 script library tweaks 2021-04-21 11:14:45 -04:00
silversword411
c22f10f96a Adding notes to vscode docs 2021-04-21 11:06:40 -04:00
silversword411
18d10c9bec power restart script tweaks 2021-04-21 09:40:07 -04:00
silversword411
890e430cb7 script library - merging scripts and parameterizing 2021-04-21 09:29:25 -04:00
wh1te909
dadc3d4cd7 add #418 2021-04-21 05:09:55 +00:00
Dan
d98b4d7320 Merge pull request #417 from silversword411/develop
tweaks to docs and scripts
2021-04-19 23:05:42 -07:00
silversword411
340f532238 tweaks to docs and scripts 2021-04-20 06:00:03 +00:00
wh1te909
7669f68e7c add code signing to docs 2021-04-20 05:54:24 +00:00
Dan
3557e5514f Merge pull request #416 from silversword411/develop
docs tweaks
2021-04-19 22:50:58 -07:00
silversword411
a9f09b7614 knew there was a bold somewhere 2021-04-20 05:15:24 +00:00
silversword411
845b9e4568 docs tweaks 2021-04-20 05:12:58 +00:00
Dan
24a6092dcf Merge pull request #415 from silversword411/develop
Community Script Library Docs v1
2021-04-19 21:36:37 -07:00
wh1te909
195ae7d8b1 add conditional menu render 2021-04-20 04:35:07 +00:00
silversword411
a5c6ea7ffc Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-20 00:32:47 -04:00
silversword411
eb7a4ac29f Script library - more cleaning 2021-04-20 00:32:35 -04:00
silversword411
508ef73fde Contributing Community Scripts v1 2021-04-20 00:32:35 -04:00
silversword411
838d6d8076 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-20 00:26:25 -04:00
silversword411
762c3159b8 Script library - more cleaning 2021-04-20 00:26:20 -04:00
wh1te909
7a88a06bcf isort 2021-04-20 04:14:20 +00:00
wh1te909
0b1e3d7de5 start fixing #409 2021-04-20 04:11:48 +00:00
silversword411
9a83c73f21 Contributing Community Scripts v1 2021-04-20 04:11:15 +00:00
Dan
aa50c7b268 Merge pull request #414 from silversword411/develop
adding agent remove/add to docs
2021-04-19 20:33:53 -07:00
silversword411
179a5a80f4 Fixing Defender GUID 2021-04-19 23:06:35 -04:00
silversword411
0ddae527ef Script Library - Renaming files to follow best practices 2021-04-19 23:02:42 -04:00
silversword411
ee7a46de26 Script library - defender status tweaks 2021-04-19 22:28:51 -04:00
silversword411
95522fda74 script library - Adding set DNS script 2021-04-19 18:38:22 -04:00
silversword411
e58881c2bd script library Set Ethernet to use DHCP 2021-04-19 18:32:36 -04:00
silversword411
36a902a44e script rename and tweaks 2021-04-19 17:11:53 -04:00
silversword411
16b74549a2 adding agent remove/add to docs 2021-04-19 11:45:55 +00:00
wh1te909
da7ededfb1 fix sorting #402 2021-04-17 20:06:07 +00:00
wh1te909
790bb08718 fix check status in summary tab 2021-04-17 19:56:10 +00:00
Dan
e6765f421f Merge pull request #408 from silversword411/develop
Tooltip update
2021-04-17 12:54:36 -07:00
silversword411
7e8f1fe904 Tooltip update 2021-04-17 02:09:06 -04:00
Dan
eacce4578a Merge pull request #407 from bradhawkins85/patch-15
Update installer.ps1
2021-04-16 22:05:14 -07:00
bradhawkins85
07b2543972 Update installer.ps1
Test and wait (up to 15 seconds) to be able to connect to the server to download the installer; don't try to download if we can't connect.
2021-04-17 13:41:35 +10:00
wh1te909
d1c3fc8493 Release 0.6.1 2021-04-16 07:46:42 +00:00
wh1te909
f453b16010 bump versions 2021-04-16 07:36:27 +00:00
wh1te909
05151d8978 add code signed agent to powershell/manual install methods 2021-04-16 07:16:55 +00:00
Dan
8218e1acc3 Merge pull request #397 from silversword411/develop
script library updates
2021-04-16 00:11:57 -07:00
wh1te909
30212fc89a fix maint mode text 2021-04-16 06:24:27 +00:00
sadnub
b31c13fcae add warning color to agents table and clients tree. Also made it update colors when checks UI is refreshed 2021-04-15 22:24:44 -04:00
sadnub
6b95fc6f1d change maintenance mode to green and modify the icon in the agent table when agent is in maintenance mode 2021-04-15 19:15:02 -04:00
sadnub
369cf17eb2 also resolve alerts when a check is cleared 2021-04-15 17:23:43 -04:00
sadnub
4dd8f512cc split up check statuses in the agent summary tab. #386 2021-04-15 17:12:46 -04:00
sadnub
26cfec7d80 add reset check status to check context menu. #388 2021-04-15 16:52:42 -04:00
sadnub
67a87ccf00 fix sticky table header in automated tasks 2021-04-15 16:12:09 -04:00
sadnub
667cebcf94 remove certain fields from view in the patch policy form when settings are inherited #396 2021-04-15 13:52:24 -04:00
sadnub
bc1747ca1c fix categories in script manager folder view. Truncate script args text in scripts table 2021-04-15 13:52:24 -04:00
silversword411
945d8647bf script add ipv6 disable 2021-04-15 11:34:35 -04:00
silversword411
dfe2e94627 tweaking script library after 0.6.0 update 2021-04-15 08:47:04 -04:00
silversword411
09a5591eec tweak docs so backup script overwrites existing name 2021-04-15 07:59:22 -04:00
silversword411
f2bf06a0ba tweak Network script names for sorting 2021-04-15 07:51:07 -04:00
silversword411
eedad4ab1c Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-15 07:47:41 -04:00
silversword411
336a62ab29 Tweaking script names 2021-04-15 07:47:31 -04:00
wh1te909
b5603a5233 Release 0.6.0 2021-04-15 05:39:24 +00:00
wh1te909
73890f553c bump versions 2021-04-15 05:34:47 +00:00
sadnub
f6243b8968 update community script to include guid and update/delete existing community scripts 2021-04-14 22:43:12 -04:00
sadnub
3770dc74d4 fix scripts dropdown 2021-04-14 21:48:39 -04:00
wh1te909
45f4e947c5 more code signing stuff 2021-04-15 01:24:58 +00:00
Dan
9928d7c6e1 Merge pull request #394 from tremor021/develop
Fixed script naming scheme
2021-04-14 18:20:31 -07:00
silversword411
bf776eeb2b Tweaking script names 2021-04-14 15:12:02 -04:00
tremor021
ae7c0e9195 update 2021-04-14 15:53:54 +02:00
tremor021
e90b640602 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-04-14 15:48:37 +02:00
tremor021
ba7529d3f5 update 2021-04-14 15:30:57 +02:00
tremor021
34667f252e Fixed naming and added task scheduling script 2021-04-14 15:23:22 +02:00
wh1te909
d18bddcb7b add code signing auth token 2021-04-14 07:49:28 +00:00
wh1te909
96dff49d33 add better exception messages to tests 2021-04-14 07:39:17 +00:00
Dan
b389728338 Merge pull request #392 from tremor021/develop
Scripts update
2021-04-14 00:35:20 -07:00
tremor021
cdc7da86f3 Fixes 2021-04-14 09:26:42 +02:00
tremor021
4745cc0378 Fixes 2021-04-14 09:25:26 +02:00
wh1te909
434f132479 add a test to make sure community script has jsonfile entry 2021-04-14 07:05:08 +00:00
tremor021
fb0f31ffc7 Added script to json 2021-04-14 08:55:17 +02:00
Dan
bb1d73c0ae Merge pull request #393 from silversword411/develop
Script library additions
2021-04-13 23:32:33 -07:00
silversword411
0e823d1191 Fixing bitlocker get recovery keys script 2021-04-14 00:40:26 -04:00
silversword411
48f4199ff3 script library - bitlocker get recovery keys 2021-04-14 00:39:20 -04:00
silversword411
eaf379587b script library - disable hibernation 2021-04-14 00:04:43 -04:00
silversword411
672446b7d1 script library check if user using temp profile 2021-04-13 23:52:04 -04:00
silversword411
dfe52c1b07 script library - task scheduler monitor 2021-04-13 23:48:15 -04:00
silversword411
d63df03ad8 script library - new user notify 2021-04-13 23:42:34 -04:00
silversword411
aba4f9f2ce script library - Azure Mars Backup check 2021-04-13 23:37:05 -04:00
silversword411
ac5c1e7803 Script library - Adding enable and disable USB devices 2021-04-13 23:26:48 -04:00
tremor021
d521dbf50e Added more scripts 2021-04-13 23:39:44 +02:00
tremor021
f210ed3e6a Added Get_Computer_Users script 2021-04-13 23:25:10 +02:00
tremor021
df3cac4ea6 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-04-13 22:14:14 +02:00
tremor021
f778c5175b Added some explanations into scripts 2021-04-13 22:10:37 +02:00
Dan
6c66ff28dd Merge pull request #385 from silversword411/develop
script library updates and docs tweak
2021-04-11 22:39:12 -07:00
silversword411
d5b6ec702b Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-11 11:36:20 -04:00
silversword411
c62a5fcef2 script library empty recycle bin2 2021-04-11 11:35:53 -04:00
silversword411
59c47e9200 script library - empty recycle bin 2021-04-11 11:35:53 -04:00
silversword411
4ba44d8932 script library - Monitor info script 2021-04-11 11:35:53 -04:00
silversword411
27dae05e1b Script add - RAM status 2021-04-11 11:35:53 -04:00
silversword411
a251ae9b90 2 scripts added 2021-04-11 11:35:53 -04:00
silversword411
7e960b2bde Fixing Table of Contents levels 2021-04-11 11:35:53 -04:00
silversword411
5df4825158 Consistency check on script "name" field for alphabetic sorts 2021-04-11 11:35:53 -04:00
silversword411
8984d06d93 script library empty recycle bin2 2021-04-11 11:26:59 -04:00
silversword411
eed7aac047 script library - empty recycle bin 2021-04-11 11:26:22 -04:00
silversword411
54b068de4a script library - Monitor info script 2021-04-11 11:21:03 -04:00
silversword411
f0f33b00b6 Script add - RAM status 2021-04-11 09:13:47 -04:00
silversword411
1043405088 2 scripts added 2021-04-11 08:47:36 -04:00
silversword411
0131b10805 Fixing Table of Contents levels 2021-04-11 08:02:51 -04:00
silversword411
a19b441f62 Consistency check on script "name" field for alphabetic sorts 2021-04-11 07:58:03 -04:00
wh1te909
28edc31d43 Release 0.5.3 2021-04-11 08:08:58 +00:00
wh1te909
0f9872a818 bump versions 2021-04-11 08:08:48 +00:00
wh1te909
76ce4296f3 fix graphics 2021-04-11 07:25:37 +00:00
wh1te909
3dd2671380 add graphics 2021-04-11 06:50:16 +00:00
wh1te909
298ca31332 remove unused func 2021-04-11 05:43:17 +00:00
wh1te909
8f911aa6b9 more tests 2021-04-11 05:35:24 +00:00
wh1te909
82a5c7d9b1 add test 2021-04-11 05:17:49 +00:00
wh1te909
7f013dcdba refactor nats-api / optimize queries 2021-04-11 05:04:33 +00:00
wh1te909
68e2e16076 add feat #377 2021-04-11 03:23:40 +00:00
wh1te909
ea23c763c9 add feat #376 2021-04-11 02:01:40 +00:00
wh1te909
5dcecb3206 fix alert text for policy diskspace check where disk doesn't exist 2021-04-10 22:09:24 +00:00
Dan
5bd48e2d0e Merge pull request #380 from silversword411/develop
Community Script Additions
2021-04-10 13:36:26 -07:00
silversword411
afd0a02589 3 scripts added from dinger1986 2021-04-10 13:44:02 -04:00
silversword411
2379192d53 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-10 13:27:35 -04:00
silversword411
a6489290c8 2 scripts added 2021-04-10 13:26:39 -04:00
silversword411
5f74c43415 2 scripts added 2021-04-10 13:22:54 -04:00
wh1te909
aa8b84a302 Release 0.5.2 2021-04-09 18:30:30 +00:00
wh1te909
b987d041b0 bump version 2021-04-09 18:29:08 +00:00
wh1te909
b62e37307e revert meshcentral back to 0.7.93 2021-04-09 18:28:43 +00:00
Dan
61a59aa6ac Merge pull request #375 from silversword411/develop
scripts 4 adds and a rename
2021-04-09 00:09:17 -07:00
silversword411
f79ec27f1d Adding 5 new scripts 2021-04-09 01:05:58 -04:00
silversword411
b993fe380f Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-09 00:40:24 -04:00
silversword411
d974b5f55f Script screenconnect rename 2021-04-09 00:38:22 -04:00
wh1te909
f21ae93197 Release 0.5.1 2021-04-08 08:05:08 +00:00
wh1te909
342ff18be8 bump versions 2021-04-08 07:58:04 +00:00
wh1te909
a8236f69bf catch msgpack decode errors 2021-04-08 06:48:43 +00:00
wh1te909
ab15a2448d update reqs 2021-04-08 06:09:48 +00:00
wh1te909
6ff4d8f558 run migrations during restore 2021-04-08 05:57:16 +00:00
sadnub
bb04ba528c make sure logs dir exists for api 2021-04-07 19:39:35 -04:00
sadnub
b94a795189 specify names for the dev and prod containers and fix frontend web .env generation 2021-04-07 19:39:35 -04:00
wh1te909
9968184733 fix alert sending the wrong winsvc status text 2021-04-07 20:34:29 +00:00
silversword411
1be6f8f87a Script screenconnect rename 2021-04-07 11:56:02 +00:00
wh1te909
426821cceb django 3.2 2021-04-07 04:58:35 +00:00
wh1te909
4fec0deaf7 add another server for exe gen 2021-04-07 04:52:16 +00:00
Dan
144ac5b6ce Merge pull request #373 from silversword411/develop
Script rename-ageddon v1
2021-04-06 21:41:22 -07:00
Dan
97c73786fa Merge pull request #372 from bradhawkins85/patch-12
Update installer.ps1
2021-04-06 21:40:24 -07:00
silversword411
82e59d7da0 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-07 04:09:24 +00:00
silversword411
b2c10de6af Script rename-ageddon v1 2021-04-07 04:09:04 +00:00
silversword411
d72029c2c6 Script rename-ageddon v1 2021-04-07 04:08:30 +00:00
bradhawkins85
17b9987063 Update installer.ps1
Set TLS version to 1.2
2021-04-07 13:47:26 +10:00
Dan
fde07da2b7 Merge pull request #371 from silversword411/develop
Script Category-geddon v1
2021-04-06 16:32:17 -07:00
silversword411
c23bc29511 Don't tell anyone, secret devbox rockin docs WIP 2021-04-06 04:12:56 +00:00
silversword411
714cad2a52 Script Category-geddon v1 2021-04-06 03:30:59 +00:00
wh1te909
357d5d2fde sort scripts alphabetically 2021-04-05 09:00:32 +00:00
Dan
d477cce901 Merge pull request #369 from bradhawkins85/patch-11
Update ScreenConnectAIO.ps1
2021-04-05 01:11:38 -07:00
bradhawkins85
eb6af52ad1 Update ScreenConnectAIO.ps1
Add error checking to ensure required custom fields have been created; stop the script if they have not been set up.
2021-04-05 17:51:01 +10:00
wh1te909
aae75023a7 add some more tests for community scripts json file 2021-04-05 07:21:47 +00:00
wh1te909
41dcd4f458 fix screenconnect args 2021-04-05 07:21:26 +00:00
Dan
4651ae4495 Merge pull request #368 from bradhawkins85/patch-10
Create ScreenConnectAIO.ps1
2021-04-04 23:45:50 -07:00
bradhawkins85
ed61e0b0fc Create ScreenConnectAIO.ps1
Install, Uninstall, Start and Stop ScreenConnect Access Agent
2021-04-05 16:42:05 +10:00
Dan
1eefc6fbf4 Merge pull request #367 from wh1te909/revert-365-patch-8
Revert "Create ScreenConnectAIO.ps1"
2021-04-04 23:40:33 -07:00
Dan
09ebf2cea2 Revert "Create ScreenConnectAIO.ps1" 2021-04-04 23:40:17 -07:00
Dan
b3b0c4cd65 Merge pull request #366 from bradhawkins85/patch-9
Update community_scripts.json
2021-04-04 23:21:52 -07:00
Dan
f4b7924e8f Merge pull request #365 from bradhawkins85/patch-8
Create ScreenConnectAIO.ps1
2021-04-04 23:21:44 -07:00
bradhawkins85
ea68d38b82 Update community_scripts.json
add ScreenConnectAIO.ps1
2021-04-05 16:18:29 +10:00
bradhawkins85
dfbaa71132 Create ScreenConnectAIO.ps1
Install, Uninstall, Start and Stop ScreenConnect Access agent script.
2021-04-05 15:59:08 +10:00
Dan
6c328deb08 Merge pull request #364 from silversword411/develop
Polishing vscode contribute docs
2021-04-04 22:29:50 -07:00
silversword411
add564d5bf Polishing vscode docs v2 2021-04-05 00:11:56 +00:00
silversword411
fa94acb426 Updating Disk Health and Duplicati scripts 2021-04-05 00:07:24 +00:00
silversword411
6827468f13 Polishing vscode contribute docs 2021-04-04 23:51:39 +00:00
Dan
53fd43868f Merge pull request #362 from silversword411/develop
Adding vscode contributing Howto to docs
2021-04-04 13:45:27 -07:00
silversword411
9ced7561c5 Adding GUIDs to all scripts 2021-04-04 18:29:21 +00:00
silversword411
31d55d3425 Adding vscode contributing Howto to docs 2021-04-04 18:10:19 +00:00
wh1te909
171d2a5bb9 update docs 2021-04-04 09:21:17 +00:00
wh1te909
c5d05c1205 Release 0.5.0 2021-04-04 07:51:19 +00:00
wh1te909
2973e0559a bump versions 2021-04-04 07:49:27 +00:00
wh1te909
ec27288dcf add link 2021-04-04 07:47:20 +00:00
wh1te909
f92e5c7093 update docs 2021-04-04 07:37:36 +00:00
wh1te909
7c67155c49 update docs 2021-04-04 07:10:30 +00:00
wh1te909
b102cd4652 log unhashable type errors when parsing custom fields 2021-04-04 05:49:39 +00:00
wh1te909
67f9a48c37 remove version from consumers view 2021-04-04 02:09:44 +00:00
wh1te909
a0c8a1ee65 change consumers 2021-04-04 01:59:06 +00:00
wh1te909
7e7d272b06 fix update script 2021-04-04 01:41:25 +00:00
sadnub
3c642240ae fix showing default value in script variable if value doesn't exist 2021-04-03 21:37:09 -04:00
wh1te909
b5157fcaf1 update bash scripts for channels 2021-04-04 01:13:27 +00:00
sadnub
d1cb42f1bc fix nats container config path 2021-04-03 20:52:06 -04:00
sadnub
84cde1a16a fix vuex getter for community script show state 2021-04-03 20:40:15 -04:00
wh1te909
877f5db1ce update reqs 2021-04-03 23:39:46 +00:00
sadnub
787164e245 add websockets container. Fix mesh upload on new installation. remove cypress until we need it 2021-04-03 17:50:18 -04:00
wh1te909
d77fc5e7c5 isort 2021-04-03 03:36:28 +00:00
wh1te909
cca39a67d6 start channels tests 2021-04-03 03:35:41 +00:00
wh1te909
a6c9a0431a isort skip 2021-04-03 03:34:46 +00:00
wh1te909
729a80a639 switch to jsonwebsocket 2021-04-03 03:25:01 +00:00
wh1te909
31cb3001f6 Merge branch 'channels' into develop 2021-04-03 00:57:18 +00:00
wh1te909
5d0f54a329 fix typo 2021-04-03 00:50:28 +00:00
wh1te909
c8c3f5b5b7 add channels to install script 2021-04-03 00:24:31 +00:00
wh1te909
ba473ed75a fix channels in prod 2021-04-03 00:17:20 +00:00
wh1te909
7236fd59f8 more websocket work 2021-04-02 22:55:16 +00:00
wh1te909
9471e8f1fd add channels to reqs 2021-04-02 22:54:58 +00:00
Dan
a2d39b51bb Merge pull request #359 from silversword411/develop
Rename computer script - change default timeout
2021-04-02 14:10:29 -07:00
wh1te909
2920934b55 fix scripts and tests 2021-04-02 21:03:32 +00:00
silversword411
3f709d448e Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-04-02 20:49:55 +00:00
silversword411
b79f66183f Changing Rename Computer Default Timeout 2021-04-02 20:48:46 +00:00
silversword411
8672f57e55 Changing Rename Computer Default Timeout 2021-04-02 20:47:06 +00:00
wh1te909
1e99c82351 testing websockets with channels 2021-04-02 20:04:04 +00:00
sadnub
1a2ff851f3 remove console log 2021-04-02 15:53:39 -04:00
sadnub
f1c27c3959 fix script timeout on running favorite script 2021-04-02 15:50:12 -04:00
sadnub
b30dac0f15 add script default args and reworked the script dropdowns to include categories 2021-04-02 15:48:08 -04:00
wh1te909
cc79e5cdaf software tests 2021-04-01 19:39:45 +00:00
wh1te909
d9a3b2f2cb tests 2021-04-01 18:54:47 +00:00
wh1te909
479b528d09 more tests 2021-04-01 09:01:57 +00:00
wh1te909
461fb84fb9 add tests 2021-04-01 08:41:51 +00:00
wh1te909
bd7685e3fa update docs 2021-04-01 07:40:42 +00:00
wh1te909
cd98cb64b3 refactor some installer views 2021-04-01 07:34:34 +00:00
sadnub
0f32a3ec24 good start to script variables 2021-04-01 00:23:42 -04:00
Dan
ca446cac87 Merge pull request #357 from bbrendon/patch-1
Update Check_Events_for_Bluescreens.ps1 - indicate time frame.
2021-03-31 17:53:21 -07:00
Brendon Baumgartner
6ea907ffda Update Check_Events_for_Bluescreens.ps1
indicate time frame.
2021-03-31 17:11:45 -07:00
wh1te909
5287baa70d fix test 2021-03-31 20:46:15 +00:00
wh1te909
25935fec84 add test for spaces in script filenames 2021-03-31 20:19:18 +00:00
Dan
e855a063ff Merge pull request #356 from tremor021/develop
Added some scripts
2021-03-31 12:15:01 -07:00
tremor021
c726b8c9f0 Fixed some things 2021-03-31 21:02:36 +02:00
tremor021
13cb99290e Revert "Fixed script naming"
This reverts commit cea9413fd1.
2021-03-31 20:57:50 +02:00
tremor021
cea9413fd1 Fixed script naming 2021-03-31 20:54:07 +02:00
wh1te909
1432853b39 Release 0.4.32 2021-03-31 18:35:05 +00:00
tremor021
6d6c2b86e8 Added some scripts 2021-03-31 20:34:45 +02:00
wh1te909
77b1d964b5 bump versions 2021-03-31 18:33:43 +00:00
wh1te909
549936fc09 add logging and timeout to deployment gen 2021-03-31 18:24:27 +00:00
wh1te909
c9c32f09c5 public docs on push to master instead of develop 2021-03-31 18:11:14 +00:00
sadnub
77f7778d4a fix being able to add/edit automation and alert templates on sites and clients 2021-03-31 12:03:26 -04:00
wh1te909
84b6be9364 un-hide custom fields 2021-03-31 07:29:28 +00:00
wh1te909
1e43b55804 Release 0.4.31 2021-03-31 07:20:46 +00:00
wh1te909
ba9bdaae0a bump versions 2021-03-31 07:09:20 +00:00
wh1te909
7dfd7bde8e fix update 2021-03-31 07:02:35 +00:00
Dan
5e6c4161d0 Merge pull request #355 from silversword411/develop
Scripts choco update
2021-03-30 23:26:17 -07:00
wh1te909
d75d56dfc9 hide customfields in ui for now 2021-03-31 06:22:53 +00:00
silversword411
1d9d350091 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-03-31 02:19:57 -04:00
silversword411
5744053c6f Scripts choco update 2021-03-31 02:14:18 -04:00
wh1te909
65589b6ca2 clients manager table ui fixes 2021-03-31 06:13:30 +00:00
silversword411
e03a9d1137 Scripts choco update 2021-03-31 00:19:44 -04:00
Dan
29f80f2276 Merge pull request #354 from silversword411/develop
2 script updates, one removal
2021-03-30 21:01:22 -07:00
silversword411
a9b74aa69b Commit the file renames 2021-03-30 23:43:18 -04:00
silversword411
63ebfd3210 2 script updates, one removal 2021-03-30 23:20:58 -04:00
wh1te909
87fa5ff7a6 feat: add default timeout in script manager closes #352 2021-03-31 03:01:46 +00:00
sadnub
b686b53a9c Update dockerfile 2021-03-30 17:11:06 -04:00
wh1te909
258261dc64 refactor goinstaller to prepare for code signing 2021-03-30 20:52:03 +00:00
sadnub
9af5c9ead9 remove console.log entries 2021-03-29 18:34:40 -04:00
wh1te909
382654188c update install docs 2021-03-29 22:02:30 +00:00
Dan
fa1df082b7 Merge pull request #345 from silversword411/develop
Updating scripts
2021-03-29 14:12:00 -07:00
sadnub
5c227d8f80 formatting 2021-03-29 15:31:29 -04:00
sadnub
81dabdbfb7 fix tests 2021-03-29 15:27:08 -04:00
sadnub
91f89f5a33 custom fields finish 2021-03-29 15:14:20 -04:00
silversword411
9f92746aa0 Adding Install All Updates and extra categories 2021-03-29 09:22:51 -04:00
wh1te909
5d6e6f9441 fix custom fields 2021-03-29 11:14:04 +00:00
wh1te909
01395a2726 isort 2021-03-29 10:23:54 +00:00
sadnub
465d75c65d formatting 2021-03-28 18:22:04 -04:00
sadnub
4634f8927e add tests for clients changes and custom fields 2021-03-28 18:17:35 -04:00
sadnub
74a287f9fe update containers to node 14 and reconfigure nats-api 2021-03-28 09:23:06 -04:00
wh1te909
7ff6c79835 remove natsapi from normal install 2021-03-27 20:00:47 +00:00
wh1te909
3629982237 refactor natsapi 2021-03-27 19:21:52 +00:00
silversword411
ddb610f1bc Bitlocker script update 2021-03-27 00:47:11 -04:00
silversword411
f899905d27 Updating script to std format: AD 2021-03-27 00:41:51 -04:00
silversword411
3e4531b5c5 Merge remote-tracking branch 'upstream/develop' into develop 2021-03-27 00:35:13 -04:00
wh1te909
a9e189e51d fix edit client, more tests 2021-03-27 00:25:45 -04:00
Dan
58ba08a8f3 Merge pull request #342 from silversword411/develop
Updating labels from (s) to (seconds)
2021-03-26 20:59:09 -07:00
silversword411
9078ff27d8 Updating (s) in labels to (seconds) 2021-03-26 22:48:13 -04:00
silversword411
6f43e61c24 Disk label v2 2021-03-26 18:48:40 -04:00
silversword411
4be0d3f212 Updating Disk check label 2021-03-26 18:39:03 -04:00
wh1te909
00e47e5a27 fix edit client, more tests 2021-03-26 22:25:13 +00:00
Dan
152e145b32 Merge pull request #341 from silversword411/develop
Script Update to standardized format
2021-03-26 14:45:37 -07:00
wh1te909
54e55e8f57 update drf 2021-03-26 21:44:05 +00:00
wh1te909
05b8707f9e black 2021-03-26 21:23:34 +00:00
wh1te909
543e952023 start fixing tests 2021-03-26 21:20:39 +00:00
silversword411
6e5f40ea06 Update community_scripts.json 2021-03-26 10:29:58 -04:00
silversword411
bbafb0be87 bios script update 2021-03-26 10:23:46 -04:00
silversword411
1c9c5232fe Rename bios_check.ps1 to Win_Bios_Check.ps1 2021-03-26 10:17:44 -04:00
wh1te909
598d79a502 fix error msg 2021-03-26 07:48:35 +00:00
wh1te909
37d8360b77 add creation date to deployment closes #340 2021-03-26 06:58:36 +00:00
wh1te909
82d9ca3317 go 1.16.2 2021-03-26 06:48:58 +00:00
wh1te909
4e4238d486 update to nodejs v14 2021-03-26 06:32:24 +00:00
wh1te909
c77dbe44dc remove old salt check 2021-03-26 06:03:04 +00:00
wh1te909
e03737f15f drop upgrade support for trmm < 0.3.0 2021-03-26 05:51:23 +00:00
Dan
a02629bcd7 Merge pull request #337 from sadnub/develop
clients and sites rework and custom fields
2021-03-25 22:24:43 -07:00
sadnub
6c3fc23d78 fix adding clients/sites/agents with custom fields 2021-03-25 23:21:57 -04:00
sadnub
0fe40f9ccb add custom fields to forms and get saving to work 2021-03-25 23:21:57 -04:00
sadnub
9bd7c8edd1 clients and sites rework and custom fields 2021-03-25 23:21:57 -04:00
wh1te909
83ba480863 Merge branch 'master' of https://github.com/wh1te909/tacticalrmm 2021-03-25 23:14:38 +00:00
wh1te909
f158ea25e9 Release 0.4.30 2021-03-25 23:14:16 +00:00
wh1te909
0227519eab bump versions 2021-03-25 23:13:41 +00:00
wh1te909
616a9685fa update reqs 2021-03-25 22:15:58 +00:00
wh1te909
fe61b01320 fix celery async errors 2021-03-24 22:13:02 +00:00
wh1te909
7b25144311 add docs for django admin 2021-03-24 07:12:26 +00:00
sadnub
9d42fbbdd7 exclude mesh agent and debug logs 2021-03-23 10:41:15 -04:00
sadnub
39ac5b088b Update entrypoint.sh 2021-03-23 10:41:04 -04:00
sadnub
c14ffd08a0 exclude mesh agent and debug logs 2021-03-23 09:04:26 -04:00
sadnub
6e1239340b Update entrypoint.sh 2021-03-23 08:56:43 -04:00
wh1te909
a297dc8b3b re-run update.sh when old version detected 2021-03-23 07:39:06 +00:00
wh1te909
8d4ecc0898 update reqs 2021-03-23 07:10:45 +00:00
wh1te909
eae9c04429 Release 0.4.29 2021-03-22 22:35:52 +00:00
wh1te909
a41c48a9c5 bump versions 2021-03-22 22:35:43 +00:00
sadnub
ff2a94bd9b Update dockerfile 2021-03-22 18:12:57 -04:00
wh1te909
4a1f5558b8 Release 0.4.28 2021-03-22 20:48:59 +00:00
wh1te909
608db9889f bump versions 2021-03-22 20:48:39 +00:00
sadnub
012b697337 Update dockerfile 2021-03-22 15:18:11 -04:00
sadnub
0580506cf3 Update entrypoint.sh 2021-03-22 15:17:49 -04:00
Dan
ff4ab9b661 Update issue templates 2021-03-18 23:25:46 -07:00
wh1te909
b7ce5fdd3e Release 0.4.27 2021-03-19 05:21:46 +00:00
wh1te909
a11e617322 bump versions 2021-03-19 05:21:37 +00:00
Dan
d0beac7e2b Merge pull request #330 from silversword411/develop
Added Rename Computer Community Script
2021-03-18 22:19:20 -07:00
silversword411
9db497092f Update Rename_Computer.ps1 2021-03-18 00:38:40 -04:00
wh1te909
8eb91c08aa Release 0.4.26 2021-03-17 17:58:29 +00:00
wh1te909
ded5437522 bump versions 2021-03-17 17:50:56 +00:00
wh1te909
9348657951 fix script manager freezing on latest chrome 2021-03-17 17:36:24 +00:00
wh1te909
bca85933f7 make sure postgres is enabled and running. update npm 2021-03-16 23:09:38 +00:00
silversword411
c32bb35f1c Added Rename Computer Community Script 2021-03-16 17:10:23 -04:00
Dan
4b84062d62 Merge pull request #329 from silversword411/develop
Adding Bluescreen script
2021-03-16 12:04:37 -07:00
silversword411
d6d0f8fa17 fixed description 2021-03-16 14:16:38 -04:00
silversword411
dd72c875d3 Add Bluescreen script
From dinger1986
2021-03-16 14:13:30 -04:00
wh1te909
1a1df50300 show all severity levels closes #326 2021-03-16 17:54:25 +00:00
wh1te909
53cbb527b4 nats 2.2.0 2021-03-16 17:03:09 +00:00
Dan
8b87b2717e Merge pull request #327 from silversword411/develop
Adding AD Recycle Bin script
2021-03-16 09:50:20 -07:00
silversword411
1007d6dac7 Adding AD Recycle Bin script
Check and Enable AD Recycle Bin
2021-03-16 11:39:44 -04:00
Dan
6799fac120 Merge pull request #325 from silversword411/patch-4
Add Chocolatey Update script to community scripts
2021-03-15 10:35:40 -07:00
silversword411
558e6288ca Merge pull request #1 from silversword411/patch-5
Adding Chocolatey updates to community scripts
2021-03-15 05:10:12 -04:00
silversword411
d9cb73291b Adding Chocolatey updates to community scripts 2021-03-15 05:04:32 -04:00
silversword411
d0f7be3ac3 Create Chocolatey_Update_Installed.bat 2021-03-15 04:57:46 -04:00
wh1te909
331e16d3ca bump mesh closes #323 2021-03-13 23:57:46 +00:00
Dan
0db246c311 Merge pull request #324 from silversword411/patch-3
Avoid multiple update file versions
2021-03-13 14:13:03 -08:00
silversword411
94dc62ff58 Avoid multiple update file versions
Kept getting update.sh.1, update.sh.2, etc. with each run, so the auto-pasted command wouldn't be running the latest version of the file.
2021-03-13 12:14:53 -05:00
wh1te909
e68ecf6844 update demo link 2021-03-12 08:22:02 +00:00
Dan
5167b0a8c6 Merge pull request #322 from silversword411/patch-3
Removing extra folder
2021-03-12 00:00:06 -08:00
silversword411
77e3d3786d Removing extra folder 2021-03-11 19:16:27 -05:00
wh1te909
708d4d39bc add test 2021-03-11 19:26:36 +00:00
Dan
2a8cda2a1e Merge pull request #321 from silversword411/patch-3
Updating to match install scripts
2021-03-11 10:47:10 -08:00
silversword411
8d783840ad Updating to match install scripts 2021-03-11 12:02:56 -05:00
wh1te909
abe39d5790 remove checks for older agents 2021-03-11 10:53:27 +00:00
wh1te909
d7868e9e5a Release 0.4.25 2021-03-11 10:11:45 +00:00
wh1te909
7b84e36e15 bump versions 2021-03-11 10:11:13 +00:00
wh1te909
6cab6d69d8 Release 0.4.24 2021-03-11 04:36:34 +00:00
wh1te909
87846d7aef bump versions 2021-03-11 04:36:14 +00:00
wh1te909
2557769c6a fix runchecks wh1te909/rmmagent@739e7434ae 2021-03-11 04:20:18 +00:00
wh1te909
48375f3878 Release 0.4.23 2021-03-11 00:35:02 +00:00
wh1te909
176c85d8c1 bump versions 2021-03-11 00:32:31 +00:00
wh1te909
17cad71ede typo 2021-03-10 22:46:11 +00:00
wh1te909
e8bf9d4e6f change thresholds for check run interval 2021-03-10 22:39:16 +00:00
wh1te909
7bdd2038ef enable django admin during install so that it installs properly, disable it at end of install 2021-03-10 22:32:36 +00:00
wh1te909
e9f6e7943a bump mesh 2021-03-10 19:52:37 +00:00
wh1te909
e74ba387ab update reqs 2021-03-10 19:03:11 +00:00
wh1te909
27c79e5b99 refactor method 2021-03-09 09:39:58 +00:00
wh1te909
8170d5ea73 feat: add client tree sorting closes #316 2021-03-09 03:17:43 +00:00
wh1te909
196f73705d isort 2021-03-09 03:14:56 +00:00
wh1te909
ad0bbf5248 add sorting back to status closes #305 2021-03-08 21:17:26 +00:00
wh1te909
4cae9cd90d add hostname to email subject 2021-03-08 06:58:02 +00:00
wh1te909
be7bc55a76 remove redundant buttons that are already in context menus 2021-03-07 10:21:46 +00:00
wh1te909
684b545e8f exclude date 2021-03-07 10:21:08 +00:00
wh1te909
7835cc3b10 update community scripts 2021-03-06 22:11:58 +00:00
Tragic Bronson
f8706b51e8 Merge pull request #314 from nr-plaxon/patch-3
Adding script to create an all-user logon script
2021-03-06 13:56:32 -08:00
nr-plaxon
d97f8fd5da Adding script to create an all-user logon script 2021-03-06 14:40:53 +01:00
sadnub
f8fa87441e black 2021-03-05 23:32:40 -05:00
sadnub
d42537814a sort of addresses #177. Allow ability to override check intervals 2021-03-05 23:27:54 -05:00
sadnub
792421b0e2 adds #66. EventLog Check: Set the number of event logs found before passing/failing 2021-03-05 21:52:08 -05:00
wh1te909
72d55a010b Release 0.4.22 2021-03-05 23:05:17 +00:00
wh1te909
880d8258ce bump versions 2021-03-05 23:02:08 +00:00
wh1te909
b79bf82efb update docs 2021-03-05 22:22:49 +00:00
wh1te909
b3118b6253 add fields to queryset 2021-03-05 09:30:53 +00:00
sadnub
ba172e2e25 fix issue with exception when other pending actions types exists 2021-03-04 16:31:25 -05:00
sadnub
892d53abeb move alert_template to property on agent versus dynamically generating it every time 2021-03-04 16:27:05 -05:00
sadnub
5cbaa1ce98 fix tests 2021-03-03 22:25:02 -05:00
sadnub
7b35d9ad2e add policy sync to automation manager 2021-03-03 22:03:11 -05:00
wh1te909
8462de7911 fix wording 2021-03-04 02:20:54 +00:00
wh1te909
8721f44298 fix tests 2021-03-04 01:10:52 +00:00
wh1te909
c7a2d69afa rework agent recovery wh1te909/rmmagent@cef1a0efed 2021-03-04 00:51:03 +00:00
wh1te909
0453d81e7a fix pendingactions count 2021-03-03 11:07:20 +00:00
wh1te909
501c04ac2b Release 0.4.21 2021-03-03 10:44:49 +00:00
wh1te909
0ef4e9a5c3 bump versions 2021-03-03 10:44:34 +00:00
wh1te909
129c50e598 fix search/sort 2021-03-03 10:17:45 +00:00
wh1te909
3e276fc2ac isort 2021-03-03 10:17:06 +00:00
sadnub
658d5e05ae black 2021-03-02 23:38:13 -05:00
sadnub
4e7d5d476e add policy exclusions 2021-03-02 23:33:34 -05:00
wh1te909
6a55ca20f3 Release 0.4.20 2021-03-02 23:42:38 +00:00
wh1te909
c56c537f7f HOTFIX 0.4.20 temporarily disable some sorting 2021-03-02 23:42:00 +00:00
wh1te909
fd7d776121 Release 0.4.19 2021-03-02 22:18:18 +00:00
wh1te909
1af28190d8 bump versions 2021-03-02 22:11:40 +00:00
wh1te909
6b305be567 add dash 2021-03-02 22:08:15 +00:00
wh1te909
3bf70513b7 isort 2021-03-02 09:18:35 +00:00
wh1te909
7e64404654 add type hints 2021-03-02 09:13:24 +00:00
wh1te909
e1b5226f34 fix alert 2021-03-02 08:46:41 +00:00
wh1te909
0d7128ad31 Revert "bump versions"
This reverts commit 5778626087.
2021-03-02 08:41:17 +00:00
wh1te909
5778626087 bump versions 2021-03-02 08:07:39 +00:00
wh1te909
3ff48756ed continue on defender errors 2021-03-02 07:38:14 +00:00
sadnub
0ce9a6eeba black 2021-03-01 22:14:48 -05:00
sadnub
ad527b4aed alerts rework and tests 2021-03-01 22:10:38 -05:00
sadnub
6633bb452e remove jest and add cypress for frontend testing 2021-03-01 22:10:38 -05:00
wh1te909
efeb0b4feb add tests 2021-03-02 00:45:37 +00:00
wh1te909
8cc11fc102 fix pendingactions ui 2021-03-02 00:39:42 +00:00
Tragic Bronson
ee6a167220 Merge pull request #302 from silversword411/patch-2
tweak for workflow
2021-03-01 16:16:38 -08:00
silversword411
8d4ad3c405 tweak for workflow 2021-03-01 19:11:01 -05:00
Tragic Bronson
072fbf4d60 Merge pull request #299 from silversword411/patch-3
Linking to FAQ
2021-03-01 15:24:55 -08:00
silversword411
727c41c283 Update install_server.md 2021-03-01 18:15:12 -05:00
silversword411
e2266838b6 Linking to FAQ
minor update and link to FAQ
2021-03-01 17:59:53 -05:00
Tragic Bronson
775762d615 Merge pull request #298 from silversword411/patch-2
Fixing bash commands
2021-03-01 14:56:34 -08:00
silversword411
900c3008cb Fixing bash commands
Removing ID/server so paste will work
2021-03-01 17:44:35 -05:00
sadnub
09379213a6 fix formatting 2021-03-01 17:37:24 -05:00
sadnub
ceb97048e3 Update mkdocs.yml 2021-03-01 17:34:27 -05:00
sadnub
4561515517 Create update_docker.md 2021-03-01 17:33:41 -05:00
wh1te909
a7b285759f delete chocolog model 2021-03-01 21:43:54 +00:00
wh1te909
b4531b2a12 ui tweaks 2021-03-01 21:37:59 +00:00
wh1te909
9e1d261c76 update faq 2021-03-01 21:09:12 +00:00
Tragic Bronson
e35fa15cd2 Merge pull request #297 from silversword411/patch-1
Docs addition - Recover login for Mesh Central
2021-03-01 13:01:11 -08:00
wh1te909
dbd1f0d4f9 pending actions refactor 2021-03-01 20:40:46 +00:00
wh1te909
9ade78b703 fix restore docs 2021-03-01 19:45:10 +00:00
silversword411
f20e244b5f Recover login for Mesh Central 2021-03-01 12:50:56 -05:00
wh1te909
0989308b7e fix tests 2021-03-01 09:35:26 +00:00
wh1te909
12c7140536 more choco rework 2021-03-01 09:26:37 +00:00
wh1te909
2a0b605e92 return empty val for missing software install date 2021-03-01 08:21:56 +00:00
wh1te909
6978890e6a add contributing docs 2021-03-01 07:51:31 +00:00
Tragic Bronson
561abd6cb9 Merge pull request #296 from beejayzed/develop
Add community script to verify antivirus status
2021-02-28 23:32:55 -08:00
beejayzed
4dd6227f0b Update community_scripts.json 2021-03-01 07:55:31 +07:00
beejayzed
1ec314c31c Rename VerifyAntivirus to VerifyAntivirus.ps1 2021-03-01 07:52:43 +07:00
beejayzed
a2be5a00be Create VerifyAntivirus 2021-03-01 07:50:56 +07:00
wh1te909
4e2241c115 start chocolatey rework 2021-02-28 11:00:45 +00:00
wh1te909
8459bca64a fix nats ping dict 2021-02-28 09:54:53 +00:00
wh1te909
24cb0565b9 add pagination to agent table 2021-02-28 09:18:04 +00:00
wh1te909
9442acb028 fix pipeline typo 2021-02-27 23:41:08 +00:00
wh1te909
4f7f181a42 fix pipeline 2021-02-27 23:24:06 +00:00
wh1te909
b7dd8737a7 make django admin disabled by default 2021-02-27 23:19:35 +00:00
wh1te909
2207eeb727 add missing import 2021-02-27 23:09:01 +00:00
wh1te909
89dad7dfe7 add sponsors info to docs 2021-02-27 22:37:11 +00:00
wh1te909
e5803d0cf3 bump mesh 2021-02-27 07:45:56 +00:00
wh1te909
c1fffe9ae6 add timeout to net 2021-02-27 06:08:42 +00:00
wh1te909
9e6cbd3d32 set uwsgi procs based on cpu count 2021-02-27 05:28:42 +00:00
wh1te909
2ea8742510 natsapi refactor 2021-02-27 00:23:03 +00:00
wh1te909
5cfa0254f9 isort 2021-02-26 23:25:44 +00:00
wh1te909
8cd2544f78 add new management command 2021-02-26 22:05:42 +00:00
wh1te909
c03b768364 fix typos 2021-02-26 09:01:14 +00:00
wh1te909
d60481ead4 add docs for management commands 2021-02-25 20:55:56 +00:00
Tragic Bronson
126be3827d Merge pull request #292 from bradhawkins85/patch-6
Update installer.ps1
2021-02-25 10:06:04 -08:00
bradhawkins85
121274dca2 Update installer.ps1
Don't try to add Windows Defender exceptions if Defender is not enabled; this prevents errors during script execution.
2021-02-25 19:59:29 +10:00
wh1te909
0ecf8da27e add management commands for resetting pw/2fa 2021-02-25 07:56:17 +00:00
wh1te909
4a6bcb525d update docs 2021-02-25 07:55:13 +00:00
wh1te909
83f9ee50dd add management commands for resetting pw/2fa 2021-02-25 07:55:03 +00:00
wh1te909
2bff297f79 Release 0.4.18 2021-02-24 20:52:49 +00:00
wh1te909
dee68f6933 bump versions 2021-02-24 20:51:47 +00:00
wh1te909
afa1e19c83 also grep postgres info during restore #285 2021-02-24 20:39:02 +00:00
wh1te909
6052088eb4 grab postgres creds automatically for backup closes #285 2021-02-24 19:23:47 +00:00
wh1te909
c7fa5167c4 also reinstall py env / node modules during forced update 2021-02-24 11:25:42 +00:00
wh1te909
1034b0b146 also reinstall py env / node modules during forced update 2021-02-24 11:24:47 +00:00
wh1te909
8bcc4e5945 fix docs styling 2021-02-24 10:04:45 +00:00
wh1te909
c3c24aa1db black 2021-02-24 09:46:38 +00:00
wh1te909
281c75d2d2 add find_software management command 2021-02-24 09:42:24 +00:00
wh1te909
52307420f3 more docs 2021-02-24 09:36:59 +00:00
wh1te909
6185347cd8 remove border 2021-02-24 09:34:30 +00:00
wh1te909
b6cd29f77e change wording 2021-02-24 09:26:36 +00:00
wh1te909
b8ea8b1567 typo 2021-02-24 08:38:44 +00:00
wh1te909
2f7dc98830 change save query 2021-02-24 07:37:48 +00:00
wh1te909
e248a99f79 add option to run sched task asap after scheduled start was missed #247 2021-02-24 06:14:28 +00:00
wh1te909
4fb6d9aa5d more docs 2021-02-24 05:32:16 +00:00
sadnub
f092ea8d67 black 2021-02-23 23:58:28 -05:00
sadnub
c32cbbdda6 check run tests and agent alert actions tests 2021-02-23 23:53:55 -05:00
sadnub
2497675259 UI changes for AddAutomated Task and ScriptCheck models 2021-02-23 23:53:55 -05:00
sadnub
8d084ab90a docker dev changes 2021-02-23 23:53:55 -05:00
wh1te909
2398773ef0 moar docs 2021-02-24 03:33:39 +00:00
wh1te909
a05998a30e docs 2021-02-24 00:12:55 +00:00
wh1te909
f863c29194 more docs 2021-02-23 22:19:58 +00:00
wh1te909
d16a98c788 Release 0.4.17 2021-02-23 19:26:54 +00:00
wh1te909
9421b02e96 bump versions 2021-02-23 19:26:17 +00:00
wh1te909
10256864e4 improve typing support 2021-02-23 09:50:57 +00:00
wh1te909
85d010615d black 2021-02-23 08:27:22 +00:00
wh1te909
cd1cb186be deploy docs with gh actions 2021-02-23 08:24:19 +00:00
wh1te909
4458354d70 more docs 2021-02-23 08:14:25 +00:00
wh1te909
0f27da8808 add management command to show outdated agents 2021-02-22 20:31:57 +00:00
wh1te909
dd76bfa3c2 fix python build from source 2021-02-22 10:06:47 +00:00
wh1te909
5780a66f7d fix python build from source 2021-02-22 10:05:46 +00:00
wh1te909
d4342c034c add test for run_script 2021-02-22 09:46:48 +00:00
wh1te909
1ec43f2530 refactor to remove duplicate code 2021-02-22 08:46:59 +00:00
wh1te909
3c300d8fdf remove print 2021-02-22 08:45:57 +00:00
wh1te909
23119b55d1 isort 2021-02-22 08:43:21 +00:00
wh1te909
c8fb0e8f8a remove unneeded imports that are now builtin in python 3.9 2021-02-22 08:05:30 +00:00
sadnub
0ec32a77ef make check results chart more responsive with large amounts of data 2021-02-21 19:00:43 -05:00
sadnub
52921bfce8 black 2021-02-21 18:56:14 -05:00
sadnub
960b929097 move annotation labels to the left for check history chart 2021-02-21 18:51:45 -05:00
sadnub
d4ce23eced adding tests to agent alert actions and a bunch of fixes 2021-02-21 18:45:34 -05:00
wh1te909
6925510f44 no cgo 2021-02-21 10:18:05 +00:00
wh1te909
9827ad4c22 add isort to dev reqs 2021-02-21 10:17:47 +00:00
wh1te909
ef8aaee028 Release 0.4.16 2021-02-21 09:58:41 +00:00
wh1te909
3d7d39f248 bump version 2021-02-21 09:58:28 +00:00
wh1te909
3eac620560 add go mod to fix docker agent exe 2021-02-21 09:56:16 +00:00
wh1te909
ab17006956 Release 0.4.15 2021-02-21 08:37:01 +00:00
wh1te909
bfc6889ee9 bump version 2021-02-21 08:36:44 +00:00
wh1te909
0ec0b4a044 python 3.9 2021-02-21 07:57:36 +00:00
wh1te909
f1a523f327 update reqs 2021-02-21 07:37:36 +00:00
sadnub
4181449aea fix tests 2021-02-20 23:18:54 -05:00
sadnub
e192f8db52 don't create alerts if not configured to do so. Added some more tests 2021-02-20 23:01:19 -05:00
wh1te909
8097c681ac Release 0.4.14 2021-02-20 22:35:35 +00:00
wh1te909
f45938bdd5 bump version 2021-02-20 22:35:14 +00:00
wh1te909
6ea4e97eca fix script args 2021-02-20 22:33:10 +00:00
wh1te909
f274c8e837 add prune alerts to server maintenance tool 2021-02-20 11:01:04 +00:00
wh1te909
335e571485 add optional --force flag to update.sh 2021-02-20 10:33:21 +00:00
wh1te909
a11616aace Release 0.4.13 2021-02-20 10:15:51 +00:00
wh1te909
883acadbc4 bump versions 2021-02-20 10:00:12 +00:00
wh1te909
f51e6a3fcf isort imports 2021-02-20 09:54:01 +00:00
wh1te909
371e081c0d remove un-used imports 2021-02-20 09:47:19 +00:00
wh1te909
6f41b3bf1c change wording 2021-02-20 09:36:36 +00:00
wh1te909
c1d74a6c9e improve alerts manager table UI 2021-02-20 08:56:19 +00:00
wh1te909
24eaa6796e remove old field 2021-02-20 08:40:06 +00:00
wh1te909
1521e3b620 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-02-20 03:44:59 +00:00
wh1te909
b6ff38dd62 fix date sorting and timezone fixes #283 2021-02-20 03:44:42 +00:00
sadnub
44ea9ac03c black 2021-02-19 22:43:48 -05:00
wh1te909
4c2701505b Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-02-20 03:42:35 +00:00
sadnub
9022fe18da add some alerts tests and some fixes 2021-02-19 22:40:00 -05:00
wh1te909
63be349f8b update quasar 2021-02-20 03:37:30 +00:00
Tragic Bronson
c40256a290 Merge pull request #286 from bradhawkins85/patch-5
Update installer.ps1
2021-02-19 00:52:09 -08:00
bradhawkins85
33ecb8ec52 Update installer.ps1
Add windows defender exclusions before downloading or installing the agent.
2021-02-19 18:04:24 +10:00
wh1te909
82d62a0015 improve mesh update detection 2021-02-18 08:53:02 +00:00
wh1te909
6278240526 Release 0.4.12 2021-02-18 07:36:31 +00:00
wh1te909
8c2dc5f57d typo 2021-02-18 07:34:28 +00:00
wh1te909
2e5868778a Release 0.4.11 2021-02-17 23:35:00 +00:00
wh1te909
a10b8dab9b bump versions 2021-02-17 23:31:49 +00:00
wh1te909
92f4f7ef59 go 1.16 2021-02-17 23:26:56 +00:00
wh1te909
31257bd5cb Release 0.4.10 2021-02-17 19:35:51 +00:00
wh1te909
bb6510862f bump version 2021-02-17 19:35:24 +00:00
sadnub
797ecf0780 implement exclude workstations and servers. Fix excluding individual clients, sites, and agents 2021-02-17 14:19:07 -05:00
sadnub
f9536dc67f allow viewing alert script results on resolved alerts 2021-02-17 13:29:51 -05:00
sadnub
e8b95362af fix automation manager UI. Modify agent/check/task table alert checkboxes to show if it is managed by an alert template 2021-02-17 13:29:51 -05:00
sadnub
bdc39ad4ec Create alerting.md 2021-02-16 23:11:34 -05:00
wh1te909
4a202c5585 Release 0.4.9 2021-02-16 23:39:22 +00:00
wh1te909
3c6b321f73 bump version 2021-02-16 23:38:38 +00:00
wh1te909
cb29b52799 remove unused import 2021-02-16 23:14:03 +00:00
wh1te909
7e48015a54 Release 0.4.8 2021-02-16 18:57:37 +00:00
wh1te909
9ed3abf932 fix tests 2021-02-16 18:55:55 +00:00
wh1te909
61762828a3 fix typo 2021-02-16 18:50:42 +00:00
wh1te909
59beabe5ac bump versions 2021-02-16 18:47:51 +00:00
wh1te909
0b30faa28c decrease pause timeout for installer 2021-02-16 18:45:59 +00:00
wh1te909
d12d49b93f update quasar [skip ci] 2021-02-16 17:24:48 +00:00
wh1te909
f1d64d275a update go [skip ci] 2021-02-16 17:15:13 +00:00
wh1te909
d094eeeb03 update natsapi [skip ci] 2021-02-16 17:09:05 +00:00
wh1te909
be25af658e partially implement #222 2021-02-16 09:22:28 +00:00
wh1te909
794f52c229 delete remove salt task 2021-02-16 08:46:42 +00:00
wh1te909
5d4dc4ed4c change monitoragents func to run async 2021-02-16 08:33:54 +00:00
wh1te909
e49d97b898 disable loading spinner during alert poll 2021-02-16 01:06:42 +00:00
wh1te909
b6b4f1ba62 fix query 2021-02-16 01:06:12 +00:00
wh1te909
653d476716 back to http requests wh1te909/rmmagent@278b3a8a55 2021-02-14 03:12:22 +00:00
sadnub
48b855258c improve test coverage for automation 2021-02-13 16:14:15 -05:00
wh1te909
c7efdaf5f9 change run_script to take the script PK instead of entire script model 2021-02-13 19:41:38 +00:00
wh1te909
22523ed3d3 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-02-13 19:40:41 +00:00
wh1te909
33c602dd61 update reqs 2021-02-13 19:40:18 +00:00
sadnub
e2a5509b76 add missing task tests in automation and alerts 2021-02-13 14:38:03 -05:00
wh1te909
61a0fa1a89 fix runscript email 2021-02-12 22:50:21 +00:00
wh1te909
a35bd8292b catch service error 2021-02-12 19:24:06 +00:00
Tragic Bronson
06c8ae60e3 Merge pull request #269 from sadnub/feature-alerts
WIP - Feature alerts
2021-02-12 10:48:27 -08:00
sadnub
deeab1f845 fix/add tests for check thresholds 2021-02-12 13:39:46 -05:00
sadnub
da81c4c987 fix failure and resolved action timeouts 2021-02-12 12:49:16 -05:00
sadnub
d180f1b2d5 fix check threshold modals and add client/serverside validation. Allow viewing alert script results in alerts overview. Fix diskspace check history computation. other fixes and improvements 2021-02-12 12:37:53 -05:00
sadnub
526135629c fix some typos and implement runscript and runscriptfull on agent function 2021-02-11 20:11:03 -05:00
sadnub
6b9493e057 reworked alerts a bit to not need AgentOutage table. Implemented resolve/failure script running on alert. also added script arg support for alert actions. Allow scripts to be run on any running agent 2021-02-11 20:11:03 -05:00
sadnub
9bb33d2afc fix tests 2021-02-11 20:11:03 -05:00
sadnub
7421138533 finish alerts views testing. Minor bug fixes 2021-02-11 20:11:03 -05:00
sadnub
d0800c52bb black 2021-02-11 20:11:03 -05:00
sadnub
913fcd4df2 fix tests and added some minor fixes 2021-02-11 20:11:03 -05:00
sadnub
83322cc725 fix automation tests. minor fixes 2021-02-11 20:11:03 -05:00
sadnub
5944501feb fix migrations for real this time 2021-02-11 20:11:03 -05:00
sadnub
17e3603d3d implement overriding email/sms settings with alert templates 2021-02-11 20:11:03 -05:00
sadnub
95be43ae47 fix alerts icon and fix policycheck/task status. added resolved alerts actions 2021-02-11 20:11:03 -05:00
sadnub
feb91cbbaa fix migration issue and consolidate migrations a bit 2021-02-11 20:11:03 -05:00
sadnub
79409af168 implement alert periodic notifications for agent, task, and check. implement sms/email functionality for autotasks 2021-02-11 20:11:03 -05:00
sadnub
5dbfb64822 add handle alerts functions to agents, checks, and tasks. Minor fixes 2021-02-11 20:11:03 -05:00
sadnub
5e7ebf5e69 added relation view and a number of bug fixes 2021-02-11 20:11:03 -05:00
sadnub
e73215ca74 implement alert template exclusions 2021-02-11 20:11:03 -05:00
sadnub
a5f123b9ce bug fixes with automated manager deleting policies and adding 2021-02-11 20:11:03 -05:00
sadnub
ac058e9675 fixed alerts manager table, added celery task to unsnooze alerts, added bulk actions to alerts overview 2021-02-11 20:11:02 -05:00
sadnub
371b764d1d added new alert option for dashboard alerts, added actions to be run if alert triggered on agent, random fixes 2021-02-11 20:11:02 -05:00
sadnub
66d7172e09 reworked policy add for client, site, and agent. removed vue unit tests, added alerting to auto tasks, added edit autotask capabilities for certain fields, moved policy generation logic to save method on Client, Site, Agent, Policy models 2021-02-11 20:11:02 -05:00
sadnub
99d3a8a749 more alerts work 2021-02-11 20:11:02 -05:00
sadnub
db5ff372a4 alerts overview work 2021-02-11 20:11:02 -05:00
sadnub
3fe83f81be migrations fix and finishing up automation manager rework 2021-02-11 20:11:02 -05:00
sadnub
669e638fd6 automation manager rework start 2021-02-11 20:11:02 -05:00
sadnub
f1f999f3b6 more alerts work 2021-02-11 20:11:02 -05:00
sadnub
6f3b6fa9ce alerts wip 2021-02-11 20:11:02 -05:00
wh1te909
938f945301 drop min ram req 2021-02-12 00:23:22 +00:00
Tragic Bronson
e3efb2aad6 Merge pull request #273 from wh1te909/dependabot/pip/dot-devcontainer/cryptography-3.3.2
Bump cryptography from 3.2.1 to 3.3.2 in /.devcontainer
2021-02-11 13:47:26 -08:00
Tragic Bronson
1e678c0d78 Merge pull request #272 from wh1te909/dependabot/pip/api/tacticalrmm/cryptography-3.3.2
Bump cryptography from 3.3.1 to 3.3.2 in /api/tacticalrmm
2021-02-11 13:47:14 -08:00
wh1te909
a59c111140 add community script 2021-02-11 17:58:59 +00:00
Tragic Bronson
a8b2a31bed Merge pull request #275 from bradhawkins85/patch-3
Create Display Message To User.ps1
2021-02-11 09:45:55 -08:00
bradhawkins85
37402f9ee8 Create Display Message To User.ps1 2021-02-11 11:16:28 +10:00
dependabot[bot]
e7b5ecb40f Bump cryptography from 3.2.1 to 3.3.2 in /.devcontainer
Bumps [cryptography](https://github.com/pyca/cryptography) from 3.2.1 to 3.3.2.
- [Release notes](https://github.com/pyca/cryptography/releases)
- [Changelog](https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pyca/cryptography/compare/3.2.1...3.3.2)

Signed-off-by: dependabot[bot] <support@github.com>
2021-02-10 02:34:29 +00:00
dependabot[bot]
c817ef04b9 Bump cryptography from 3.3.1 to 3.3.2 in /api/tacticalrmm
Bumps [cryptography](https://github.com/pyca/cryptography) from 3.3.1 to 3.3.2.
- [Release notes](https://github.com/pyca/cryptography/releases)
- [Changelog](https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pyca/cryptography/compare/3.3.1...3.3.2)

Signed-off-by: dependabot[bot] <support@github.com>
2021-02-10 01:51:52 +00:00
wh1te909
f52b18439c update ssh script 2021-02-09 19:08:24 +00:00
wh1te909
1e03c628d5 Release 0.4.7 2021-02-06 01:04:02 +00:00
wh1te909
71fb39db1f bump versions 2021-02-06 00:59:49 +00:00
wh1te909
bcfb3726b0 update restore script to work on debian 10 2021-02-06 00:40:25 +00:00
wh1te909
c6e9e29671 increase uwsgi buffer size 2021-02-06 00:39:22 +00:00
wh1te909
1bfefcce39 fix backup 2021-02-06 00:38:29 +00:00
wh1te909
22488e93e1 approve updates when triggered manually 2021-02-03 23:23:34 +00:00
wh1te909
244b89f035 exclude migrations from black 2021-02-03 20:11:49 +00:00
wh1te909
1f9a241b94 Release 0.4.6 2021-02-02 19:33:30 +00:00
wh1te909
03641aae42 bump versions 2021-02-02 19:20:24 +00:00
wh1te909
a2bdd113cc update natsapi [skip ci] 2021-02-02 19:19:29 +00:00
wh1te909
a92e2f3c7b more winupdate fixes 2021-02-02 09:42:12 +00:00
wh1te909
97766b3a57 more superseded updates cleanup 2021-02-02 01:12:20 +00:00
wh1te909
9ef4c3bb06 more pending actions fix 2021-02-01 21:23:37 +00:00
wh1te909
d82f0cd757 Release 0.4.5 2021-02-01 20:57:53 +00:00
wh1te909
5f529e2af4 bump versions 2021-02-01 20:57:35 +00:00
wh1te909
beadd9e02b fix duplicate pending actions being created 2021-02-01 20:56:05 +00:00
wh1te909
72543789cb Release 0.4.4 2021-02-01 19:24:51 +00:00
wh1te909
5789439fa9 bump versions 2021-02-01 19:23:03 +00:00
wh1te909
f549126bcf update natsapi 2021-02-01 19:20:32 +00:00
wh1te909
7197548bad new pipelines vm 2021-01-31 02:42:10 +00:00
wh1te909
241fde783c add back pending actions for agent updates 2021-01-31 02:06:55 +00:00
wh1te909
2b872cd1f4 remove old views 2021-01-31 00:19:10 +00:00
wh1te909
a606fb4d1d add some deps to install for stripped down vps [skip ci] 2021-01-30 21:31:01 +00:00
wh1te909
9f9c6be38e update natsapi [skip ci] github.com/wh1te909/rmmagent@47b25c29362f0639ec606571f679df1f523e69a9 2021-01-30 06:42:20 +00:00
wh1te909
01ee524049 Release 0.4.3 2021-01-30 04:45:10 +00:00
wh1te909
af9cb65338 bump version 2021-01-30 04:44:41 +00:00
wh1te909
8aa11c580b move agents monitor task to go 2021-01-30 04:39:15 +00:00
wh1te909
ada627f444 forgot to enable natsapi during install 2021-01-30 04:28:27 +00:00
wh1te909
a7b6d338c3 update reqs 2021-01-30 02:06:56 +00:00
wh1te909
9f00538b97 fix tests 2021-01-29 23:38:59 +00:00
wh1te909
a085015282 increase timeout for security eventlogs 2021-01-29 23:34:16 +00:00
wh1te909
0b9c220fbb remove old task 2021-01-29 20:36:28 +00:00
wh1te909
0e3d04873d move wmi celery task to golang 2021-01-29 20:10:52 +00:00
wh1te909
b7578d939f add test for community script shell type 2021-01-29 09:37:34 +00:00
wh1te909
b5c28de03f Release 0.4.2 2021-01-29 08:23:06 +00:00
wh1te909
e17d25c156 bump versions 2021-01-29 08:12:03 +00:00
wh1te909
c25dc1b99c also override shell during load community scripts 2021-01-29 07:39:08 +00:00
Tragic Bronson
a493a574bd Merge pull request #265 from saulens22/patch-1
Fix "TRMM Defender Exclusions" script shell type
2021-01-28 23:36:03 -08:00
Saulius Kazokas
4284493dce Fix "TRMM Defender Exclusions" script shell type 2021-01-29 07:10:10 +02:00
wh1te909
25059de8e1 fix superseded windows defender updates 2021-01-29 02:37:51 +00:00
wh1te909
1731b05ad0 remove old serializers 2021-01-29 02:25:31 +00:00
wh1te909
e80dc663ac remove unused func 2021-01-29 02:22:06 +00:00
wh1te909
39988a4c2f cleanup an old view 2021-01-29 02:15:27 +00:00
wh1te909
415bff303a add some debug for unsupported agents 2021-01-29 01:22:35 +00:00
wh1te909
a65eb62a54 checkrunner changes wh1te909/rmmagent@10a0935f1b 2021-01-29 00:34:18 +00:00
wh1te909
03b2982128 update build flags 2021-01-28 23:11:32 +00:00
wh1te909
bff0527857 Release 0.4.1 2021-01-27 07:48:14 +00:00
wh1te909
f3b7634254 fix tests 2021-01-27 07:45:00 +00:00
wh1te909
6a9593c0b9 bump versions 2021-01-27 07:35:11 +00:00
wh1te909
edb785b8e5 prepare for agent 1.4.0 2021-01-27 07:11:49 +00:00
wh1te909
26d757b50a checkrunner interval changes wh1te909/rmmagent@7f131d54cf 2021-01-27 06:38:42 +00:00
wh1te909
535079ee87 update natsapi 2021-01-26 20:54:30 +00:00
wh1te909
ac380c29c1 fix last response sorting closes #258 2021-01-26 19:58:08 +00:00
wh1te909
3fd212f26c more optimizations 2021-01-25 21:05:59 +00:00
wh1te909
04a3abc651 fix tests 2021-01-25 20:46:22 +00:00
wh1te909
6caf85ddd1 optimize some queries 2021-01-25 20:27:20 +00:00
wh1te909
16e4071508 use error msg from backend 2021-01-25 19:57:50 +00:00
wh1te909
69e7c4324b start mkdocs 2021-01-25 19:55:48 +00:00
wh1te909
a1c4a8cbe5 fix tab refresh 2021-01-23 06:27:33 +00:00
wh1te909
e37f6cfda7 Release 0.4.0 2021-01-23 03:46:22 +00:00
wh1te909
989c804409 bump version 2021-01-23 03:45:49 +00:00
sadnub
7345bc3c82 fix image build script 2021-01-22 20:04:30 -05:00
sadnub
69bee35700 remove winupdate container from dev 2021-01-22 20:03:30 -05:00
sadnub
598e24df7c remove salt and celery-winupdate containers 2021-01-22 19:57:58 -05:00
sadnub
0ae669201e Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-01-22 19:26:03 -05:00
wh1te909
f52a8a4642 black 2021-01-23 00:02:26 +00:00
wh1te909
9c40b61ef2 fix test 2021-01-22 23:41:10 +00:00
wh1te909
72dabcda83 fix a test 2021-01-22 23:29:18 +00:00
wh1te909
161a06dbcc don't change tab when using site refresh button 2021-01-22 23:27:28 +00:00
wh1te909
8ed3d4e70c update quasar 2021-01-22 23:26:44 +00:00
wh1te909
a4223ccc8a bump agent and mesh vers 2021-01-22 22:56:33 +00:00
wh1te909
ca85923855 add purge 2021-01-22 09:34:08 +00:00
wh1te909
52bfe7c493 update natsapi 2021-01-22 00:41:27 +00:00
wh1te909
4786bd0cbe create meshusername during install 2021-01-22 00:40:09 +00:00
wh1te909
cadab160ff add check to remove salt 2021-01-21 23:58:31 +00:00
wh1te909
6a7f17b2b0 more salt cleanup 2021-01-21 00:00:34 +00:00
wh1te909
4986a4d775 more salt cleanup 2021-01-20 23:22:02 +00:00
wh1te909
903af0c2cf goodbye salt, you've served us well 2021-01-20 22:11:54 +00:00
wh1te909
3282fa803c move to go for chocolatey wh1te909/rmmagent@cebde22fa0 2021-01-19 23:43:37 +00:00
wh1te909
67cc47608d add hosts check to migration doc 2021-01-19 23:25:35 +00:00
wh1te909
0411704b8b update rmmagent and resty 2021-01-19 23:10:50 +00:00
wh1te909
1de85b2c69 more winupdate rework wh1te909/rmmagent@08ec2f9191 2021-01-19 03:14:54 +00:00
wh1te909
33b012f29d typo 2021-01-19 03:11:07 +00:00
wh1te909
1357584df3 start winupdate rework 2021-01-19 00:59:38 +00:00
sadnub
e15809e271 Merge branch 'develop' of https://github.com/sadnub/tacticalrmm into develop 2021-01-18 09:17:17 -05:00
wh1te909
0da1950427 Release 0.3.3 2021-01-18 11:01:25 +00:00
wh1te909
e590b921be fix #252 2021-01-18 11:00:50 +00:00
wh1te909
09462692f5 Release 0.3.2 2021-01-18 10:00:45 +00:00
wh1te909
c1d1b5f762 bump version 2021-01-18 10:00:26 +00:00
wh1te909
6b9c87b858 feat: set agent table tab default #249 2021-01-18 09:57:50 +00:00
wh1te909
485b6eb904 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-01-18 09:32:00 +00:00
wh1te909
057630bdb5 fix agent table sort #250 2021-01-18 09:31:28 +00:00
wh1te909
6b02873b30 fix agent table sort #250 2021-01-18 09:12:01 +00:00
wh1te909
0fa0fc6d6b add json linter to migration docs 2021-01-17 18:09:47 +00:00
wh1te909
339ec07465 Release 0.3.1 2021-01-17 05:48:27 +00:00
wh1te909
cd2e798fea bump versions 2021-01-17 05:43:34 +00:00
wh1te909
d5cadbeae2 split agent update into chunks 2021-01-17 05:42:38 +00:00
wh1te909
8046a3ccae Release 0.3.0 2021-01-17 02:16:06 +00:00
wh1te909
bf91d60b31 natsapi bin 1.0.0 2021-01-17 02:07:53 +00:00
wh1te909
539c047ec8 update go 2021-01-17 01:53:45 +00:00
wh1te909
290c18fa87 bump versions 2021-01-17 01:22:08 +00:00
wh1te909
98c46f5e57 fix domain 2021-01-17 01:21:21 +00:00
wh1te909
f8bd5b5b4e update configs/scripts and add migration docs for 0.3.0 2021-01-17 01:16:28 +00:00
wh1te909
816d32edad black 2021-01-16 23:34:55 +00:00
wh1te909
8453835c05 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-01-16 23:32:54 +00:00
wh1te909
9328c356c8 possible fix for mesh scaling 2021-01-16 23:32:46 +00:00
sadnub
89e3c1fc94 remove my print statements 2021-01-16 17:46:56 -05:00
sadnub
67e54cd15d Remove pending action duplicates and make policy check/task propagation more efficient 2021-01-16 17:46:56 -05:00
sadnub
278ea24786 improve dev env 2021-01-16 17:46:56 -05:00
sadnub
aba1662631 remove my print statements 2021-01-16 17:46:30 -05:00
sadnub
61eeb60c19 Remove pending action duplicates and make policy check/task propagation more efficient 2021-01-16 17:44:27 -05:00
wh1te909
5e9a8f4806 new natsapi binary 2021-01-16 21:55:06 +00:00
wh1te909
4cb274e9bc update to celery 5 2021-01-16 21:52:30 +00:00
wh1te909
8b9b1a6a35 update mesh docker conf 2021-01-16 21:50:29 +00:00
sadnub
2655964113 improve dev env 2021-01-16 11:20:24 -05:00
wh1te909
188bad061b add wmi task 2021-01-16 10:31:00 +00:00
wh1te909
3af4c329aa update reqs 2021-01-16 09:42:03 +00:00
wh1te909
6c13395f7d add debug 2021-01-16 09:41:27 +00:00
wh1te909
77b32ba360 remove import 2021-01-16 09:39:15 +00:00
sadnub
91dba291ac nats-api fixes 2021-01-15 23:41:21 -05:00
sadnub
a6bc293640 Finish up check charts 2021-01-15 22:11:40 -05:00
sadnub
53882d6e5f fix dev port 2021-01-15 21:25:32 -05:00
sadnub
d68adfbf10 docker nats-api rework 2021-01-15 21:11:27 -05:00
sadnub
498a392d7f check graphs wip 2021-01-15 21:10:25 -05:00
sadnub
740f6c05db docker cli additions 2021-01-15 21:10:25 -05:00
wh1te909
d810ce301f update natsapi flags 2021-01-16 00:01:31 +00:00
wh1te909
5ef6a14d24 add nats-api binary 2021-01-15 18:21:25 +00:00
wh1te909
a13f6f1e68 move recovery to natsapi 2021-01-15 10:19:01 +00:00
wh1te909
d2d0f1aaee fix tests 2021-01-15 09:57:46 +00:00
wh1te909
e64c72cc89 #234 sort proc mem using bytes wh1te909/rmmagent@04470dd4ce 2021-01-15 09:44:18 +00:00
wh1te909
9ab915a08b Release 0.2.23 2021-01-14 02:43:56 +00:00
wh1te909
e26fbf0328 bump versions 2021-01-14 02:29:14 +00:00
wh1te909
d9a52c4a2a update reqs 2021-01-14 02:27:40 +00:00
wh1te909
7b2ec90de9 feat: double-click agent action #232 2021-01-14 02:21:08 +00:00
wh1te909
d310bf8bbf add community scripts from dinger #242 2021-01-14 01:17:58 +00:00
wh1te909
2abc6cc939 partially fix sort 2021-01-14 00:01:08 +00:00
sadnub
56d4e694a2 fix annotations and error for the check chart 2021-01-13 18:43:09 -05:00
wh1te909
5f002c9cdc bump mesh 2021-01-13 23:35:14 +00:00
wh1te909
759daf4b4a add wording 2021-01-13 23:35:01 +00:00
wh1te909
3a8d9568e3 split some tasks into chunks to reduce load 2021-01-13 22:26:54 +00:00
wh1te909
ff22a9d94a fix deployments in docker 2021-01-13 22:19:09 +00:00
sadnub
a6e42d5374 fix removing pendingactions that are outstanding 2021-01-13 13:21:09 -05:00
wh1te909
a2f74e0488 add natsapi flags 2021-01-12 21:14:43 +00:00
wh1te909
ee44240569 black 2021-01-12 21:06:44 +00:00
wh1te909
d0828744a2 update nginx conf
(cherry picked from commit bf61e27f8a)
2021-01-12 06:38:52 +00:00
wh1te909
6e2e576b29 start natsapi 2021-01-12 06:32:00 +00:00
wh1te909
bf61e27f8a update nginx conf 2021-01-12 03:02:03 +00:00
Tragic Bronson
c441c30b46 Merge pull request #243 from sadnub/develop
Move Check Runs from Audit to its own table
2021-01-11 00:29:59 -08:00
Tragic Bronson
0e741230ea Merge pull request #242 from dinger1986/develop
Added some scripts checks etc
2021-01-11 00:29:47 -08:00
sadnub
1bfe9ac2db complete other pending actions with same task if task is deleted 2021-01-10 20:19:38 -05:00
sadnub
6812e72348 fix process sorting 2021-01-10 19:35:39 -05:00
sadnub
b6449d2f5b black 2021-01-10 16:33:10 -05:00
sadnub
7e3ea20dce add some tests and bug fixes 2021-01-10 16:27:48 -05:00
sadnub
c9d6fe9dcd allow returning all check data 2021-01-10 15:14:02 -05:00
sadnub
4a649a6b8b black 2021-01-10 14:47:34 -05:00
sadnub
8fef184963 add check history graph for cpu, memory, and diskspace 2021-01-10 14:15:05 -05:00
sadnub
69583ca3c0 docker dev fixes 2021-01-10 13:17:49 -05:00
dinger1986
6038a68e91 Win Defender exclusions for Tactical 2021-01-10 17:56:12 +00:00
dinger1986
fa8bd8db87 Manually reinstall Mesh just incase 2021-01-10 17:54:41 +00:00
dinger1986
18b4f0ed0f Runs DNS check on host as defined 2021-01-10 17:53:53 +00:00
dinger1986
461f9d66c9 Disable Faststartup on Windows 10 2021-01-10 17:51:33 +00:00
dinger1986
2155103c7a Check Win Defender for detections etc 2021-01-10 17:51:06 +00:00
dinger1986
c9a6839c45 Clears Win Defender log files 2021-01-10 17:50:13 +00:00
dinger1986
9fbe331a80 Allows the following Apps access by Win Defender 2021-01-10 17:49:36 +00:00
dinger1986
a56389c4ce Sync time with DC 2021-01-10 17:46:47 +00:00
dinger1986
64656784cb Powershell Speedtest 2021-01-10 17:46:00 +00:00
dinger1986
6eff2c181e Install RDP and change power config 2021-01-10 17:44:23 +00:00
dinger1986
1aa48c6d62 Install OpenSSH on PCs 2021-01-10 17:42:11 +00:00
dinger1986
c7ca1a346d Enable Windows Defender and set preferences 2021-01-10 17:40:06 +00:00
dinger1986
fa0ec7b502 check Duplicati Backup is running properly 2021-01-10 17:38:06 +00:00
dinger1986
768438c136 Checks disks for errors reported in event viewer 2021-01-10 17:36:42 +00:00
dinger1986
9badea0b3c Update DiskStatus.ps1
Checks local disks for errors reported in event viewer within the last 24 hours
2021-01-10 17:35:50 +00:00
dinger1986
43263a1650 Add files via upload 2021-01-10 17:33:48 +00:00
wh1te909
821e02dc75 update mesh docker conf 2021-01-10 00:20:44 +00:00
wh1te909
ed011ecf28 remove old mesh overrides #217 2021-01-10 00:15:11 +00:00
wh1te909
d861de4c2f update community scripts 2021-01-09 22:26:02 +00:00
Tragic Bronson
3a3b2449dc Merge pull request #241 from RVL-Solutions/develop
Create Windows10Upgrade.ps1
2021-01-09 14:12:05 -08:00
Ruben van Leusden
d2614406ca Create Windows10Upgrade.ps1
Shared by Kyt through Discord
2021-01-08 22:20:33 +01:00
Tragic Bronson
0798d098ae Merge pull request #238 from wh1te909/revert-235-master
Revert "Create Windows10Upgrade.ps1"
2021-01-08 10:38:33 -08:00
Tragic Bronson
dab7ddc2bb Revert "Create Windows10Upgrade.ps1" 2021-01-08 10:36:42 -08:00
Tragic Bronson
081a96e281 Merge pull request #235 from RVL-Solutions/master
Create Windows10Upgrade.ps1
2021-01-08 10:36:19 -08:00
wh1te909
a7dd881d79 Release 0.2.22 2021-01-08 18:16:17 +00:00
wh1te909
8134d5e24d remove threading 2021-01-08 18:15:55 +00:00
Ruben van Leusden
ba6756cd45 Create Windows10Upgrade.ps1 2021-01-06 23:19:14 +01:00
Tragic Bronson
5d8fce21ac Merge pull request #230 from wh1te909/dependabot/npm_and_yarn/web/axios-0.21.1
Bump axios from 0.21.0 to 0.21.1 in /web
2021-01-05 13:51:18 -08:00
dependabot[bot]
e7e4a5bcd4 Bump axios from 0.21.0 to 0.21.1 in /web
Bumps [axios](https://github.com/axios/axios) from 0.21.0 to 0.21.1.
- [Release notes](https://github.com/axios/axios/releases)
- [Changelog](https://github.com/axios/axios/blob/v0.21.1/CHANGELOG.md)
- [Commits](https://github.com/axios/axios/compare/v0.21.0...v0.21.1)

Signed-off-by: dependabot[bot] <support@github.com>
2021-01-05 15:54:54 +00:00
wh1te909
55f33357ea Release 0.2.21 2021-01-05 08:55:54 +00:00
wh1te909
90568bba31 bump versions 2021-01-05 08:55:08 +00:00
wh1te909
5d6e2dc2e4 feat: add send script results by email #212 2021-01-05 08:52:17 +00:00
sadnub
6bb33f2559 fix unassigned scripts not showing if no categories are present 2021-01-04 20:22:42 -05:00
wh1te909
ced92554ed update community scripts 2021-01-04 22:00:17 +00:00
Tragic Bronson
dff3383158 Merge pull request #228 from azulskyknight/patch-2
Create SetHighPerformancePowerProfile.ps1
2021-01-04 13:42:20 -08:00
Tragic Bronson
bf03c89cb2 Merge pull request #227 from azulskyknight/patch-1
Create ResetHighPerformancePowerProfiletoDefaults.ps1
2021-01-04 13:42:10 -08:00
azulskyknight
9f1484bbef Create SetHighPerformancePowerProfile.ps1
Script sets the High Performance Power profile to the active power profile.
Use this to keep machines from falling asleep.
2021-01-04 13:21:00 -07:00
azulskyknight
3899680e26 Create ResetHighPerformancePowerProfiletoDefaults.ps1
Script resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values.
It also re-indexes the AC and DC power profiles into their default order.
2021-01-04 13:19:03 -07:00
sadnub
6bb2eb25a1 sort script folders alphabetically and fix showing community scripts when no user scripts present 2021-01-03 21:01:50 -05:00
sadnub
f8dfd8edb3 Make pip copy the binaries versus symlink them in dev env 2021-01-03 20:15:40 -05:00
sadnub
042be624a3 Update .dockerignore 2021-01-03 15:16:13 -05:00
sadnub
6bafa4c79a fix mesh init on dev 2021-01-03 15:15:43 -05:00
wh1te909
58b42fac5c Release 0.2.20 2021-01-03 09:13:28 +00:00
wh1te909
3b47b9558a let python calculate default threadpool workers based on cpu count 2021-01-03 09:12:38 +00:00
wh1te909
ccf9636296 Release 0.2.19 2021-01-02 09:34:12 +00:00
wh1te909
96942719f2 bump versions 2021-01-02 09:32:04 +00:00
wh1te909
69cf1c1adc update quasar 2021-01-02 07:38:33 +00:00
wh1te909
d77cba40b8 black 2021-01-02 07:26:34 +00:00
wh1te909
968735b555 fix scroll 2021-01-02 07:21:10 +00:00
wh1te909
ceed9d29eb task changes 2021-01-02 07:20:52 +00:00
sadnub
41329039ee add .env example 2021-01-02 00:09:56 -05:00
sadnub
f68b102ca8 Add Dev Containers 2021-01-02 00:05:54 -05:00
wh1te909
fa36e54298 change agent update 2021-01-02 01:30:51 +00:00
wh1te909
b689f57435 black 2021-01-01 00:51:44 +00:00
sadnub
885fa0ff56 add api tests to core app 2020-12-31 17:18:25 -05:00
Tragic Bronson
303acb72a3 Merge pull request #225 from sadnub/develop
add folder view to script manager
2020-12-31 13:12:33 -08:00
sadnub
b2a46cd0cd add folder view to script manager 2020-12-31 15:46:44 -05:00
wh1te909
5a5ecb3ee3 install curl/wget first fixes #224 2020-12-30 19:04:14 +00:00
wh1te909
60b4ab6a63 fix logging 2020-12-22 05:15:44 +00:00
wh1te909
e4b096a08f fix logging 2020-12-22 05:14:44 +00:00
wh1te909
343f55049b prevent duplicate cpu/mem checks from being created 2020-12-19 20:38:22 +00:00
wh1te909
6b46025261 Release 0.2.18 2020-12-19 08:44:45 +00:00
wh1te909
5ea503f23e bump version 2020-12-19 08:43:47 +00:00
wh1te909
ce95f9ac23 add codestyle to tests 2020-12-19 08:24:47 +00:00
wh1te909
c3fb87501b black 2020-12-19 08:20:12 +00:00
wh1te909
dc6a343612 bump mesh 2020-12-19 07:55:39 +00:00
wh1te909
3a61053957 update reqs 2020-12-19 07:50:32 +00:00
wh1te909
570129e4d4 add debian 10 to readme 2020-12-19 07:50:05 +00:00
wh1te909
3315c7045f if ubuntu, force 20.04 2020-12-19 07:45:21 +00:00
wh1te909
5ae50e242c always run npm install during update 2020-12-18 21:59:23 +00:00
Tragic Bronson
bbcf449719 Merge pull request #214 from mckinnon81/debian
Updated install.sh for Debian
2020-12-18 13:56:14 -08:00
Matthew McKinnon
aab10f7184 Removed certbot test-cert. Not needed 2020-12-18 08:32:40 +10:00
Matthew McKinnon
8d43488cb8 Updated install.sh for Debian
Updated api\tacticalrmm\accounts\views.py valid_window=10
2020-12-18 08:28:01 +10:00
Tragic Bronson
0a9c647e19 Merge pull request #211 from sadnub/develop
Fix default policies
2020-12-16 13:51:37 -08:00
wh1te909
40db5d4aa8 remove debug print 2020-12-16 21:50:43 +00:00
Josh
9254532baa fix applying default policies in certain situations 2020-12-16 20:38:36 +00:00
Josh
7abed47cf0 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2020-12-16 19:08:12 +00:00
Tragic Bronson
5c6ac758f7 Merge pull request #210 from mckinnon81/scripts
Fixed Paths in ClearFirefoxCache.ps1 & ClearGoogleChromeCache.ps1
2020-12-16 09:36:33 -08:00
Matthew McKinnon
007677962c Fixed Paths in ClearFirefoxCache.ps1 & ClearGoogleChromeCache.ps1 2020-12-16 22:32:04 +10:00
wh1te909
9c4aeab64a back to develop 2020-12-16 10:47:05 +00:00
wh1te909
48e6fc0efe test coveralls 2 2020-12-16 10:41:39 +00:00
wh1te909
c8be713d11 test coveralls 2020-12-16 10:38:00 +00:00
wh1te909
ae887c8648 switch to branch head for coveralls 2020-12-16 10:20:50 +00:00
wh1te909
5daac2531b add accounts tests for new settings 2020-12-16 10:09:58 +00:00
wh1te909
68def00327 fix tests 2020-12-16 09:40:36 +00:00
wh1te909
67e7976710 pipelines attempt 2 2020-12-16 09:25:28 +00:00
wh1te909
35747e937e try to get pipelines to fail 2020-12-16 09:10:53 +00:00
wh1te909
fb439787a4 Release 0.2.17 2020-12-16 00:37:59 +00:00
wh1te909
8fa368f473 bump versions 2020-12-16 00:36:43 +00:00
sadnub
c84a9d07b1 tactical-cli for managing docker installations 2020-12-15 13:41:03 -05:00
wh1te909
7fb46cdfc4 add more targeting options to bulk actions 2020-12-15 08:30:55 +00:00
Tragic Bronson
52985e5ddc Merge pull request #203 from wh1te909/dependabot/npm_and_yarn/docs/ini-1.3.8
Bump ini from 1.3.5 to 1.3.8 in /docs
2020-12-15 00:10:01 -08:00
wh1te909
e880935dc3 make script name required 2020-12-15 07:37:37 +00:00
wh1te909
cc22b1bca5 send favorite data when adding new script 2020-12-15 07:37:09 +00:00
wh1te909
49a5128918 remove extra migrations already handled by another func 2020-12-15 05:06:33 +00:00
wh1te909
fedc7dcb44 #204 add optional setting to prevent initial admin user from being modified or deleted 2020-12-14 21:00:25 +00:00
wh1te909
cd32b20215 remove vue tests for now 2020-12-14 20:59:43 +00:00
wh1te909
15cd9832c4 change fav script context menu style 2020-12-14 20:41:07 +00:00
wh1te909
f25d4e4553 add agent recovery periodic task 2020-12-14 19:27:09 +00:00
Tragic Bronson
12d1c82b63 Merge pull request #200 from sadnub/develop
Scripts Manager Rework
2020-12-14 10:35:19 -08:00
wh1te909
aebe855078 add a favorite menu to agent's context menu for easy way to run scripts 2020-12-14 11:28:00 +00:00
wh1te909
3416a71ebd add community scripts to migration 2020-12-14 07:17:51 +00:00
Tragic Bronson
94b3fea528 Create FUNDING.yml 2020-12-13 20:57:05 -08:00
Josh
ad1a9ecca1 fix agent table pending actions filter 2020-12-14 04:39:42 +00:00
Josh
715accfb8a scripts rework 2020-12-14 04:39:02 +00:00
wh1te909
a8e03c6138 Release 0.2.16 2020-12-13 11:46:12 +00:00
wh1te909
f69446b648 agent 1.1.11 wh1te909/rmmagent@f693d15322 2020-12-13 11:45:24 +00:00
dependabot[bot]
eedfbe5846 Bump ini from 1.3.5 to 1.3.8 in /docs
Bumps [ini](https://github.com/isaacs/ini) from 1.3.5 to 1.3.8.
- [Release notes](https://github.com/isaacs/ini/releases)
- [Commits](https://github.com/isaacs/ini/compare/v1.3.5...v1.3.8)

Signed-off-by: dependabot[bot] <support@github.com>
2020-12-13 07:18:22 +00:00
wh1te909
153351cc9f Release 0.2.15 2020-12-12 09:40:08 +00:00
wh1te909
1b1eec40a7 agent check-in and recovery improvements 2020-12-12 09:39:20 +00:00
wh1te909
763877541a Release 0.2.14 2020-12-12 01:59:47 +00:00
wh1te909
1fad7d72a2 fix for special chars in computer hostname closes #201 2020-12-12 01:59:10 +00:00
wh1te909
51ea2ea879 Release 0.2.13 2020-12-11 20:48:11 +00:00
wh1te909
d77a478bf0 agent 1.1.8 2020-12-11 20:47:54 +00:00
wh1te909
e413c0264a Release 0.2.12 2020-12-11 07:28:27 +00:00
wh1te909
f88e7f898c bump versions 2020-12-11 07:27:42 +00:00
wh1te909
d07bd4a6db add optional silent flag to installer 2020-12-11 07:25:42 +00:00
wh1te909
fb34c099d5 Release 0.2.11 2020-12-10 19:13:24 +00:00
wh1te909
1d2ee56a15 bump versions 2020-12-10 19:12:30 +00:00
wh1te909
86665f7f09 change update task for agent 1.1.6 2020-12-10 19:08:29 +00:00
wh1te909
0d2b4af986 Release 0.2.10 2020-12-10 10:34:40 +00:00
wh1te909
dc2b2eeb9f bump versions 2020-12-10 10:33:44 +00:00
wh1te909
e5dbb66d53 cleanup agent update func 2020-12-10 10:31:58 +00:00
wh1te909
3474b1c471 fix failing checks alert 2020-12-10 00:01:54 +00:00
wh1te909
3886de5b7c add postgres vacuum 2020-12-10 00:00:02 +00:00
wh1te909
2b3cec06b3 Release 0.2.9 2020-12-09 05:07:11 +00:00
wh1te909
8536754d14 bump version for new agent 2020-12-09 05:06:19 +00:00
wh1te909
1f36235801 fix wording 2020-12-09 05:04:25 +00:00
wh1te909
a4194b14f9 Release 0.2.8 2020-12-09 00:50:48 +00:00
wh1te909
2dcc629d9d bump versions 2020-12-09 00:31:33 +00:00
wh1te909
98ddadc6bc add sync task 2020-12-08 23:02:05 +00:00
wh1te909
f6e47b7383 remove extra services view 2020-12-08 20:09:09 +00:00
wh1te909
f073ddc906 Release 0.2.7 2020-12-07 09:50:37 +00:00
wh1te909
3e00631925 cleanup older pending action agent updates if one exists with an older agent version 2020-12-07 09:50:15 +00:00
wh1te909
9b7ac58562 Release 0.2.6 2020-12-07 08:56:20 +00:00
wh1te909
f242ddd801 bump versions 2020-12-07 08:55:49 +00:00
wh1te909
c129886fe2 change sleeps 2020-12-07 08:30:21 +00:00
wh1te909
f577e814cf add refresh summary 2020-12-07 08:29:37 +00:00
wh1te909
c860a0cedd update reqs 2020-12-07 00:35:38 +00:00
wh1te909
ae7e28e492 try fixing coveralls branch 2020-12-06 00:43:36 +00:00
wh1te909
90a63234ad add coveralls 2020-12-04 06:40:44 +00:00
wh1te909
14bca52e8f remove dead code, update middleware 2020-12-04 06:25:53 +00:00
wh1te909
2f3c3361cf remove static clients list from audit log 2020-12-04 06:05:25 +00:00
wh1te909
4034134055 add task scheduler expire after wh1te909/rmmagent@fe91e5f110 2020-12-03 22:46:25 +00:00
sadnub
c04f94cb7b fix certificates on docker 2020-12-03 12:29:03 -05:00
sadnub
fd1bbc7925 Update docker-build-push.yml 2020-12-02 07:53:12 -05:00
wh1te909
ff69bed394 Release 0.2.5 2020-12-02 11:06:55 +00:00
wh1te909
d6e8c5146f bump version 2020-12-02 11:06:34 +00:00
wh1te909
9a04cf99d7 fix pending actions ui 2020-12-02 11:05:29 +00:00
wh1te909
86e7c11e71 fix mesh nginx 2020-12-02 10:40:20 +00:00
wh1te909
361cc08faa Release 0.2.4 2020-12-02 05:45:55 +00:00
wh1te909
70dc771052 bump rmm and agent ver 2020-12-02 05:35:13 +00:00
wh1te909
c14873a799 update optional args 2020-12-02 05:33:35 +00:00
wh1te909
bba5abd74b bump script vers 2020-12-02 05:16:16 +00:00
wh1te909
a224e79c1f bump mesh and vue 2020-12-02 04:51:05 +00:00
wh1te909
c305d98186 remove old code 2020-12-02 04:14:35 +00:00
wh1te909
7c5a473e71 add flag to skip salt during agent install 2020-12-02 04:00:36 +00:00
wh1te909
5e0f5d1eed check for old installers 2020-12-02 03:23:16 +00:00
wh1te909
238b269bc4 remove update salt task 2020-12-02 03:22:19 +00:00
Josh
0ad121b9d2 fix tests attempt 2 2020-12-01 16:46:38 +00:00
Josh
7088acd9fd fix tests and remove travis config 2020-12-01 16:41:59 +00:00
Josh
e0a900d4b6 test for rm_orphaned_task in core maintenance 2020-12-01 16:35:34 +00:00
Josh
a0fe2f0c7d fix tests 2020-12-01 16:11:03 +00:00
Josh
d5b9bc2f26 get cert file locations from settings in docker build 2020-12-01 16:10:49 +00:00
Josh
584254e6ca fix/add tests 2020-12-01 15:55:26 +00:00
wh1te909
a2963ed7bb reload table when pending action changed 2020-12-01 07:01:50 +00:00
wh1te909
2a3c2e133d fix wording 2020-12-01 06:43:52 +00:00
wh1te909
3e7dcb2755 don't hide refresh when sw list empty 2020-12-01 06:27:34 +00:00
wh1te909
faeec00b39 remove more tasks now handled by the agent 2020-12-01 06:16:09 +00:00
wh1te909
eeed81392f add rm orphaned tasks to maintenance tab 2020-12-01 05:55:27 +00:00
wh1te909
95dce9e992 check for supported agent 2020-12-01 05:52:32 +00:00
wh1te909
502bd2a191 patch nats 2020-12-01 05:16:47 +00:00
wh1te909
17ac92a9d0 remove dead code 2020-12-01 05:16:37 +00:00
wh1te909
ba028cde0c remove old api app 2020-12-01 05:00:13 +00:00
wh1te909
6e751e7a9b remove bg task that's handled by the agent now 2020-12-01 04:51:51 +00:00
wh1te909
948b56d0e6 add a ghetto check for non standard cert 2020-12-01 04:47:09 +00:00
wh1te909
4bf2dc9ece don't create unnecessary outage records 2020-12-01 04:44:38 +00:00
Josh
125823f8ab add server maintenance to tools menu 2020-12-01 03:44:58 +00:00
Josh
24d33397e9 add virtual scroll to audit log table 2020-12-01 02:17:20 +00:00
Josh
2c553825f4 add server-side pagination for audit logging 2020-12-01 02:01:10 +00:00
wh1te909
198c485e9a reduce threads 2020-11-30 21:51:25 +00:00
wh1te909
0138505507 reduce threads 2020-11-30 21:49:47 +00:00
wh1te909
5d50dcc600 add api endpoint for software 2020-11-30 21:45:12 +00:00
wh1te909
7bdd8c4626 add some type hints 2020-11-30 10:28:25 +00:00
wh1te909
fc82c35f0c finish moving schedtasks to nats 2020-11-30 08:18:47 +00:00
wh1te909
426ebad300 start moving schedtasks to nats wh1te909/rmmagent@0cde11a067 2020-11-29 23:40:29 +00:00
sadnub
1afe61c593 fix docker-compose.yml 2020-11-29 14:24:32 -05:00
wh1te909
c20751829b create migration for schedtask weekdays 2020-11-29 10:37:46 +00:00
Tragic Bronson
a3b8ee8392 Merge pull request #194 from sadnub/develop
Get mesh version for settings.py
2020-11-28 21:02:58 -08:00
Josh
156c0fe7f6 add dockerignore and get MESH_VER from settings.py 2020-11-29 04:47:34 +00:00
wh1te909
216f7a38cf support mesh > 0.6.84 wh1te909/rmmagent@85aab2facf 2020-11-29 04:15:57 +00:00
Tragic Bronson
fd04dc10d4 Merge pull request #193 from sadnub/feature-uichanges
Some fixes
2020-11-28 19:48:41 -08:00
Josh
d39bdce926 add install agent to site context menu 2020-11-29 03:30:31 +00:00
Josh
c6e01245b0 fix disabled prop on edit agent patch policy and agent checks tab 2020-11-29 02:56:35 +00:00
Josh
c168ee7ba4 bump app version and mesh version 2020-11-29 02:44:29 +00:00
Josh
7575253000 regenerate policies and tasks on site/client change on agent 2020-11-29 02:35:30 +00:00
Josh
c28c1efbb1 Add pending actions to agent table and filter 2020-11-29 02:13:50 +00:00
sadnub
e6aa2c3b78 Delete docker-build-publish.yml 2020-11-28 09:47:41 -05:00
sadnub
ab7c481f83 Create docker-build-push.yml 2020-11-28 09:47:27 -05:00
wh1te909
84ad1c352d Release 0.2.3 2020-11-28 06:09:38 +00:00
wh1te909
e9aad39ac9 bump version 2020-11-28 06:09:01 +00:00
wh1te909
c3444a87bc update backup/restore scripts for nats 2020-11-28 06:05:47 +00:00
sadnub
67b224b340 get automated builds working 2020-11-28 00:23:11 -05:00
sadnub
bded14d36b fix action file 2020-11-27 23:12:22 -05:00
sadnub
73fa0b6631 create github action for testing 2020-11-27 23:09:45 -05:00
Josh Krawczyk
2f07337588 fix mesh container and wait for nginx 2020-11-27 21:15:27 -05:00
wh1te909
da163d44e7 fix nats reload for old agents, fix domain parsing for non standard domains 2020-11-27 22:41:32 +00:00
Josh
56fbf8ae0c docker fixes for salt modules and nats config reload 2020-11-27 19:31:33 +00:00
wh1te909
327eb4b39b Release 0.2.2 2020-11-26 07:37:00 +00:00
wh1te909
ae7873a7e3 fix duplicate key error causing UI to freeze 2020-11-26 07:36:26 +00:00
wh1te909
9a5f01813b Release 0.2.1 2020-11-26 06:20:49 +00:00
wh1te909
0605a3b725 fix uninstall for older agents 2020-11-26 06:20:01 +00:00
wh1te909
09c535f159 Release 0.2.0 2020-11-26 03:43:40 +00:00
wh1te909
7fb11da5df update scripts and bump version 2020-11-26 03:42:27 +00:00
wh1te909
9c9a46499a allow changing of refresh interval for task manager 2020-11-26 01:54:20 +00:00
wh1te909
6fca60261e fix recovery 2020-11-26 01:04:42 +00:00
wh1te909
00537b32ef hide output 2020-11-26 00:38:13 +00:00
wh1te909
8636758a90 fix tests 2020-11-26 00:02:11 +00:00
wh1te909
e39dfbd624 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2020-11-25 23:48:43 +00:00
wh1te909
6e048b2a12 agent recovery improvements 2020-11-25 23:48:14 +00:00
wh1te909
f9657599c2 update agents UI fixes 2020-11-25 23:45:02 +00:00
wh1te909
42ae3bba9b increase timeout for software list 2020-11-25 23:41:49 +00:00
Josh
2fd56a4bfe lock in mongodb and redis service containers to specific versions 2020-11-25 23:09:17 +00:00
wh1te909
824bcc5603 black 2020-11-25 22:19:27 +00:00
wh1te909
4fbb613aaa change bg tasks schedule 2020-11-25 21:18:03 +00:00
sadnub
9eb45270f2 Update docker readme 2020-11-25 14:53:38 -05:00
Tragic Bronson
75c61c53e8 Merge pull request #190 from sadnub/develop
Docker Setup
2020-11-24 20:46:05 -08:00
sadnub
2688a47436 fix settings 2020-11-24 23:32:26 -05:00
sadnub
fe3bf4b189 get nats container working 2020-11-24 23:25:34 -05:00
Josh Krawczyk
456cb5ebb2 mesh data fix 2020-11-24 23:25:34 -05:00
Josh Krawczyk
3d91d574b4 docker changes 2020-11-24 23:25:34 -05:00
sadnub
54876c5499 fixes to domain in cert generation 2020-11-24 23:25:34 -05:00
sadnub
d256585284 docker changes 2020-11-24 23:25:34 -05:00
sadnub
bd8f100b43 move tactical docker image to alpine 2020-11-24 23:25:34 -05:00
sadnub
44f05f2dcc nats docker setup 2020-11-24 23:25:34 -05:00
sadnub
43f7f82bdc docker fixes 2020-11-24 23:25:34 -05:00
wh1te909
e902f63211 fix response 2020-11-25 03:40:39 +00:00
wh1te909
129f68e194 remove task that's no longer applicable due to recent db changes 2020-11-25 02:47:44 +00:00
wh1te909
4b37fe12d7 remove task that's no longer applicable due to recent db changes 2020-11-25 02:46:50 +00:00
wh1te909
6de79922c5 fix git failing to switch branches during update 2020-11-25 01:18:26 +00:00
wh1te909
e1a9791f44 move run task to nats 2020-11-25 00:17:12 +00:00
wh1te909
81795f51c6 more cleanup 2020-11-24 21:13:54 +00:00
wh1te909
68dfb11155 style fix 2020-11-24 21:09:15 +00:00
wh1te909
39fc1beb89 one more nats 2020-11-24 10:08:20 +00:00
wh1te909
fe0ddec0f9 move runchecks to nats 2020-11-24 05:49:42 +00:00
wh1te909
9b52b4efd9 move wmi to nats 2020-11-24 05:14:45 +00:00
wh1te909
e90e527603 move bulk cmd/script to nats 2020-11-24 04:09:52 +00:00
wh1te909
a510854741 fix core settings for install script 2020-11-23 07:58:22 +00:00
wh1te909
8935ce4ccf move installed software to nats wh1te909/rmmagent@b5b5297350 2020-11-23 06:59:26 +00:00
wh1te909
f9edc9059a format 2020-11-23 06:15:26 +00:00
wh1te909
db8917a769 move reboot to nats 2020-11-23 05:09:06 +00:00
wh1te909
c2d70cc1c2 more nats, fix tests 2020-11-23 03:58:37 +00:00
wh1te909
3b13c7f9ce move agent uninstall to nats wh1te909/rmmagent@502cc0d3de 2020-11-23 02:19:54 +00:00
wh1te909
b7150d8026 don't update last seen in checkrunner 2020-11-23 01:25:33 +00:00
wh1te909
041830a7f8 bump quasar 2020-11-23 01:15:53 +00:00
wh1te909
a18daf0195 fix tests 2020-11-23 00:47:16 +00:00
wh1te909
5d3dfceb22 remove extra logger 2020-11-23 00:46:48 +00:00
wh1te909
c82855e732 remove travis 2020-11-22 23:37:01 +00:00
wh1te909
956f156018 notify if agent not supported 2020-11-22 23:31:59 +00:00
wh1te909
9b13c35e7f nats winsvc check 2020-11-22 23:06:52 +00:00
wh1te909
bc8e637bba add port 4222 to instructions 2020-11-22 22:50:59 +00:00
wh1te909
f03c28c906 mesh nats 2020-11-22 11:29:47 +00:00
wh1te909
e4b1f39fdc move run script to nats 2020-11-22 10:32:21 +00:00
wh1te909
4780af910c add nats recovery 2020-11-22 04:09:23 +00:00
wh1te909
d61ce5c524 move edit winsvc to nats wh1te909/rmmagent@88085847a5 2020-11-22 00:39:56 +00:00
wh1te909
20ab151f4d start moving win svcs to nats wh1te909/rmmagent@d2c9ec7f6d 2020-11-21 23:14:24 +00:00
wh1te909
8a7be7543a Merge branch 'nats' into develop 2020-11-21 04:00:21 +00:00
wh1te909
3f806aec9c fix scripts 2020-11-21 03:37:39 +00:00
wh1te909
6c273b32bb switch axios url 2020-11-21 03:21:16 +00:00
wh1te909
b986f9d6ee add missing escape 2020-11-21 03:07:59 +00:00
wh1te909
c98cca6b7b Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2020-11-21 02:47:41 +00:00
wh1te909
fbec78ede5 cut down on nginx logging 2020-11-21 02:47:23 +00:00
sadnub
c1d9a2d1f1 certificate fixes and mesh setup improvements 2020-11-20 16:42:12 -05:00
sadnub
8a10036f32 fix tests 2020-11-20 10:30:07 -05:00
sadnub
924a3aec0e Update readme.md 2020-11-20 10:21:45 -05:00
sadnub
3b3ac31541 fix certificates 2020-11-20 09:46:05 -05:00
wh1te909
e0cb2f9d0f add new agent update method wh1te909/rmmagent@9ede622837 2020-11-20 09:23:38 +00:00
wh1te909
549b4edb59 self update the update script 2020-11-20 08:09:58 +00:00
Tragic Bronson
67c912aca2 Merge pull request #186 from sadnub/develop
New Docker Setup
2020-11-19 22:59:03 -08:00
sadnub
a74dde5d9e attempt tests fix 2020-11-20 00:04:28 -05:00
sadnub
f7bcd24726 working docker setup with Dockerhub 2020-11-19 23:34:50 -05:00
sadnub
337c900770 fix merge conflicts 2020-11-19 19:18:33 -05:00
Josh Krawczyk
e83e73ead4 finished up docker 2020-11-19 19:03:44 -05:00
Josh Krawczyk
9ec2f6b64d more docker changes 2020-11-18 22:42:45 -05:00
wh1te909
f970592efe eventlog 2020-11-18 07:35:57 +00:00
sadnub
7592c11e99 more docker changes 2020-11-17 23:39:03 -05:00
sadnub
759b05e137 more docker changes 2020-11-17 22:56:16 -05:00
wh1te909
42ebd9ffce procs, cmd and eventlog nats 2020-11-17 08:25:56 +00:00
Josh Krawczyk
bc0fc33966 more docker additions 2020-11-16 21:22:28 -05:00
Josh Krawczyk
f4aab16e39 more docker changes 2020-11-16 14:28:10 -05:00
wh1te909
e91425287c start nats 2020-11-15 07:40:26 +00:00
sadnub
f05908f570 docker rework start 2020-11-14 16:55:05 -05:00
649 changed files with 58339 additions and 65458 deletions


@@ -0,0 +1,28 @@
COMPOSE_PROJECT_NAME=trmm
IMAGE_REPO=tacticalrmm/
VERSION=latest
# tactical credentials (Used to login to dashboard)
TRMM_USER=tactical
TRMM_PASS=tactical
# dns settings
APP_HOST=rmm.example.com
API_HOST=api.example.com
MESH_HOST=mesh.example.com
# mesh settings
MESH_USER=tactical
MESH_PASS=tactical
MONGODB_USER=mongouser
MONGODB_PASSWORD=mongopass
# database settings
POSTGRES_USER=postgres
POSTGRES_PASS=postgrespass
# DEV SETTINGS
APP_PORT=80
API_PORT=80
HTTP_PROTOCOL=https
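For orientation, a minimal usage sketch: assuming the variables above ship as an example env file under `.devcontainer/` (the `.env.example` name is an assumption) and that docker-compose reads `.env` from the same directory, the dev stack could be brought up roughly like this:

```
cd .devcontainer
cp .env.example .env           # hypothetical source name; put the variables shown above into .env
# edit .env first: change the TRMM_*/MESH_*/POSTGRES_* credentials and the *_HOST entries
docker-compose up -d --build   # docker-compose substitutes ${...} values from .env in this directory
```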


@@ -0,0 +1,24 @@
FROM python:3.9.2-slim
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
ENV WORKSPACE_DIR /workspace
ENV TACTICAL_USER tactical
ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
EXPOSE 8000 8383 8005
RUN groupadd -g 1000 tactical && \
useradd -u 1000 -g 1000 tactical
# Copy Dev python reqs
COPY ./requirements.txt /
# Copy Docker Entrypoint
COPY ./entrypoint.sh /
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm


@@ -0,0 +1,19 @@
version: '3.4'
services:
api-dev:
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
ports:
- 8000:8000
- 5678:5678
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
networks:
dev:
aliases:
- tactical-backend
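A hedged sketch of how this debug override pairs with the base dev compose file (file paths mirror the "docker debug" VS Code task further below); the API container installs debugpy and waits for a client on port 5678 before starting Django:

```
# run from the repo root; paths assumed from .vscode/tasks.json
docker-compose -p trmm \
  -f .devcontainer/docker-compose.yml \
  -f .devcontainer/docker-compose.debug.yml \
  up -d --build
# then attach a debugger (e.g. the "Django: Docker Remote Attach" launch config) to localhost:5678
```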


@@ -0,0 +1,257 @@
version: '3.4'
services:
api-dev:
container_name: trmm-api-dev
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-api"]
environment:
API_PORT: ${API_PORT}
ports:
- "8000:${API_PORT}"
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
networks:
dev:
aliases:
- tactical-backend
app-dev:
container_name: trmm-app-dev
image: node:14-alpine
restart: always
command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
working_dir: /workspace/web
volumes:
- ..:/workspace:cached
ports:
- "8080:${APP_PORT}"
networks:
dev:
aliases:
- tactical-frontend
# nats
nats-dev:
container_name: trmm-nats-dev
image: ${IMAGE_REPO}tactical-nats:${VERSION}
restart: always
environment:
API_HOST: ${API_HOST}
API_PORT: ${API_PORT}
DEV: 1
ports:
- "4222:4222"
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
networks:
dev:
aliases:
- ${API_HOST}
- tactical-nats
# meshcentral container
meshcentral-dev:
container_name: trmm-meshcentral-dev
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
restart: always
environment:
MESH_HOST: ${MESH_HOST}
MESH_USER: ${MESH_USER}
MESH_PASS: ${MESH_PASS}
MONGODB_USER: ${MONGODB_USER}
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
NGINX_HOST_IP: 172.21.0.20
networks:
dev:
aliases:
- tactical-meshcentral
- ${MESH_HOST}
volumes:
- tactical-data-dev:/opt/tactical
- mesh-data-dev:/home/node/app/meshcentral-data
depends_on:
- mongodb-dev
# mongodb container for meshcentral
mongodb-dev:
container_name: trmm-mongodb-dev
image: mongo:4.4
restart: always
environment:
MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
MONGO_INITDB_DATABASE: meshcentral
networks:
dev:
aliases:
- tactical-mongodb
volumes:
- mongo-dev-data:/data/db
# postgres database for api service
postgres-dev:
container_name: trmm-postgres-dev
image: postgres:13-alpine
restart: always
environment:
POSTGRES_DB: tacticalrmm
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASS}
volumes:
- postgres-data-dev:/var/lib/postgresql/data
networks:
dev:
aliases:
- tactical-postgres
# redis container for celery tasks
redis-dev:
container_name: trmm-redis-dev
restart: always
image: redis:6.0-alpine
networks:
dev:
aliases:
- tactical-redis
init-dev:
container_name: trmm-init-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
restart: on-failure
command: ["tactical-init-dev"]
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASS: ${POSTGRES_PASS}
APP_HOST: ${APP_HOST}
API_HOST: ${API_HOST}
MESH_HOST: ${MESH_HOST}
MESH_USER: ${MESH_USER}
TRMM_USER: ${TRMM_USER}
TRMM_PASS: ${TRMM_PASS}
HTTP_PROTOCOL: ${HTTP_PROTOCOL}
APP_PORT: ${APP_PORT}
depends_on:
- postgres-dev
- meshcentral-dev
networks:
- dev
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
# container for celery worker service
celery-dev:
container_name: trmm-celery-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celery-dev"]
restart: always
networks:
- dev
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
depends_on:
- postgres-dev
- redis-dev
# container for celery beat service
celerybeat-dev:
container_name: trmm-celerybeat-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-celerybeat-dev"]
restart: always
networks:
- dev
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
depends_on:
- postgres-dev
- redis-dev
# container for websockets communication
websockets-dev:
container_name: trmm-websockets-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-websockets-dev"]
restart: always
networks:
dev:
aliases:
- tactical-websockets
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
depends_on:
- postgres-dev
- redis-dev
# container for tactical reverse proxy
nginx-dev:
container_name: trmm-nginx-dev
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
restart: always
environment:
APP_HOST: ${APP_HOST}
API_HOST: ${API_HOST}
MESH_HOST: ${MESH_HOST}
CERT_PUB_KEY: ${CERT_PUB_KEY}
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
APP_PORT: ${APP_PORT}
API_PORT: ${API_PORT}
networks:
dev:
ipv4_address: 172.21.0.20
ports:
- "80:80"
- "443:443"
volumes:
- tactical-data-dev:/opt/tactical
mkdocs-dev:
container_name: trmm-mkdocs-dev
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-mkdocs-dev"]
ports:
- "8005:8005"
volumes:
- ..:/workspace:cached
networks:
- dev
volumes:
tactical-data-dev:
postgres-data-dev:
mongo-dev-data:
mesh-data-dev:
networks:
dev:
driver: bridge
ipam:
driver: default
config:
- subnet: 172.21.0.0/24
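A rough sketch of bringing the dev stack up and verifying it, assuming the file above is `.devcontainer/docker-compose.yml` and a populated `.env` sits next to it:

```
cd .devcontainer
docker-compose up -d --build
docker-compose ps                  # the trmm-*-dev containers should be running
docker-compose logs -f init-dev    # watch migrations, mesh setup and script loading finish
```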

.devcontainer/entrypoint.sh

@@ -0,0 +1,177 @@
#!/usr/bin/env bash
set -e
: "${TRMM_USER:=tactical}"
: "${TRMM_PASS:=tactical}"
: "${POSTGRES_HOST:=tactical-postgres}"
: "${POSTGRES_PORT:=5432}"
: "${POSTGRES_USER:=tactical}"
: "${POSTGRES_PASS:=tactical}"
: "${POSTGRES_DB:=tacticalrmm}"
: "${MESH_CONTAINER:=tactical-meshcentral}"
: "${MESH_USER:=meshcentral}"
: "${MESH_PASS:=meshcentralpass}"
: "${MESH_HOST:=tactical-meshcentral}"
: "${API_HOST:=tactical-backend}"
: "${APP_HOST:=tactical-frontend}"
: "${REDIS_HOST:=tactical-redis}"
: "${HTTP_PROTOCOL:=http}"
: "${APP_PORT:=8080}"
: "${API_PORT:=8000}"
# Add python venv to path
export PATH="${VIRTUAL_ENV}/bin:$PATH"
function check_tactical_ready {
sleep 15
until [ -f "${TACTICAL_READY_FILE}" ]; do
echo "waiting for init container to finish install or update..."
sleep 10
done
}
function django_setup {
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
echo "waiting for postgresql container to be ready..."
sleep 5
done
until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
echo "waiting for meshcentral container to be ready..."
sleep 5
done
echo "setting up django environment"
# configure django settings
MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)"
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
localvars="$(cat << EOF
SECRET_KEY = '${DJANGO_SEKRET}'
DEBUG = True
DOCKER_BUILD = True
CERT_FILE = '/opt/tactical/certs/fullchain.pem'
KEY_FILE = '/opt/tactical/certs/privkey.pem'
SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
ALLOWED_HOSTS = ['${API_HOST}', '*']
ADMIN_URL = 'admin/'
CORS_ORIGIN_ALLOW_ALL = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': '${POSTGRES_DB}',
'USER': '${POSTGRES_USER}',
'PASSWORD': '${POSTGRES_PASS}',
'HOST': '${POSTGRES_HOST}',
'PORT': '${POSTGRES_PORT}',
}
}
REST_FRAMEWORK = {
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
REDIS_HOST = '${REDIS_HOST}'
ADMIN_ENABLED = True
EOF
)"
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
# run migrations and init scripts
"${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
"${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
"${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
"${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup
"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
# create super user
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
}
if [ "$1" = 'tactical-init-dev' ]; then
# make directories if they don't exist
mkdir -p "${TACTICAL_DIR}/tmp"
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
# setup Python virtual env and install dependencies
! test -e "${VIRTUAL_ENV}" && python -m venv ${VIRTUAL_ENV}
"${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt
django_setup
# create .env file for frontend
webenv="$(cat << EOF
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
APP_URL = "https://${APP_HOST}"
DOCKER_BUILD = 1
EOF
)"
echo "${webenv}" | tee "${WORKSPACE_DIR}"/web/.env > /dev/null
# chown everything to tactical user
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
# create install ready file
su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
fi
if [ "$1" = 'tactical-api' ]; then
check_tactical_ready
"${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
fi
if [ "$1" = 'tactical-celery-dev' ]; then
check_tactical_ready
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm worker -l debug
fi
if [ "$1" = 'tactical-celerybeat-dev' ]; then
check_tactical_ready
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
fi
if [ "$1" = 'tactical-websockets-dev' ]; then
check_tactical_ready
"${VIRTUAL_ENV}"/bin/daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
fi
if [ "$1" = 'tactical-mkdocs-dev' ]; then
cd "${WORKSPACE_DIR}/docs"
"${VIRTUAL_ENV}"/bin/mkdocs serve
fi
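The entrypoint dispatches on its first argument, which is what the `command:` entries in the dev compose file pass in. A hedged sketch of invoking those paths manually for troubleshooting (service names assumed from the compose file above):

```
cd .devcontainer
# re-run the one-shot init flow (venv, migrations, mesh token, community scripts, super user)
docker-compose run --rm init-dev tactical-init-dev
# or start a throwaway API container in the foreground
docker-compose run --rm --service-ports api-dev tactical-api
```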


@@ -0,0 +1,35 @@
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
asyncio-nats-client
celery
channels
Django
django-cors-headers
django-rest-knox
djangorestframework
loguru
msgpack
psycopg2-binary
pycparser
pycryptodome
pyotp
pyparsing
pytz
qrcode
redis
twilio
packaging
validators
websockets
black
Werkzeug
django-extensions
coverage
coveralls
model_bakery
mkdocs
mkdocs-material
pymdown-extensions
Pygments
mypy
pysnooper
isort

.dockerignore

@@ -0,0 +1,25 @@
**/__pycache__
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
**/env
README.md

.github/FUNDING.yml

@@ -0,0 +1,12 @@
# These are supported funding model platforms
github: wh1te909
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: tacticalrmm
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

.github/ISSUE_TEMPLATE/bug_report.md

@@ -0,0 +1,40 @@
---
name: Bug report
about: Create a bug report
title: ''
labels: ''
assignees: ''
---
**Server Info (please complete the following information):**
- OS: [e.g. Ubuntu 20.04, Debian 10]
- Browser: [e.g. chrome, safari]
- RMM Version (as shown in top left of web UI):
**Installation Method:**
- [ ] Standard
- [ ] Docker
**Agent Info (please complete the following information):**
- Agent version (as shown in the 'Summary' tab of the agent from web UI):
- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Additional context**
Add any other context about the problem here.


@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

.github/workflows/deploy-docs.yml

@@ -0,0 +1,22 @@
name: Deploy Docs
on:
push:
branches:
- master
defaults:
run:
working-directory: docs
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.x
- run: pip install --upgrade pip
- run: pip install --upgrade setuptools wheel
- run: pip install mkdocs mkdocs-material pymdown-extensions
- run: mkdocs gh-deploy --force
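The same steps can be reproduced locally before pushing; a sketch assuming Python 3 and the repo's `docs/` directory:

```
cd docs
pip install --upgrade pip setuptools wheel
pip install mkdocs mkdocs-material pymdown-extensions
mkdocs serve                 # local preview (default http://127.0.0.1:8000)
# mkdocs gh-deploy --force   # what the workflow runs on pushes to master
```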

.github/workflows/docker-build-push.yml

@@ -0,0 +1,78 @@
name: Publish Tactical Docker Images
on:
push:
tags:
- "v*.*.*"
jobs:
docker:
name: Build and Push Docker Images
runs-on: ubuntu-latest
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Get Github Tag
id: prep
run: |
echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and Push Tactical Image
uses: docker/build-push-action@v2
with:
context: .
push: true
pull: true
file: ./docker/containers/tactical/dockerfile
platforms: linux/amd64
tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest
- name: Build and Push Tactical MeshCentral Image
uses: docker/build-push-action@v2
with:
context: .
push: true
pull: true
file: ./docker/containers/tactical-meshcentral/dockerfile
platforms: linux/amd64
tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest
- name: Build and Push Tactical NATS Image
uses: docker/build-push-action@v2
with:
context: .
push: true
pull: true
file: ./docker/containers/tactical-nats/dockerfile
platforms: linux/amd64
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
- name: Build and Push Tactical Frontend Image
uses: docker/build-push-action@v2
with:
context: .
push: true
pull: true
file: ./docker/containers/tactical-frontend/dockerfile
platforms: linux/amd64
tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest
- name: Build and Push Tactical Nginx Image
uses: docker/build-push-action@v2
with:
context: .
push: true
pull: true
file: ./docker/containers/tactical-nginx/dockerfile
platforms: linux/amd64
tags: tacticalrmm/tactical-nginx:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nginx:latest
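For reference, a rough local equivalent of one of the build-and-push steps above, assuming Docker Buildx is configured and you are logged in to Docker Hub; the tag value is a placeholder for whatever version is being released:

```
VERSION=x.y.z   # placeholder for the pushed git tag
docker buildx build . \
  --file ./docker/containers/tactical/dockerfile \
  --platform linux/amd64 \
  --tag tacticalrmm/tactical:${VERSION} \
  --tag tacticalrmm/tactical:latest \
  --push
```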

.gitignore

@@ -42,4 +42,8 @@ api/tacticalrmm/accounts/management/commands/random_data.py
versioninfo.go
resource.syso
htmlcov/
docker-compose.dev.yml
docs/.vuepress/dist
nats-rmm.conf
.mypy_cache
docs/site/


@@ -1,43 +0,0 @@
dist: focal
matrix:
include:
- language: node_js
node_js: "12"
before_install:
- cd web
install:
- npm install
script:
- npm run test:unit
- language: python
python: "3.8"
services:
- redis
addons:
postgresql: "13"
apt:
packages:
- postgresql-13
before_script:
- psql -c 'CREATE DATABASE travisci;' -U postgres
- psql -c "CREATE USER travisci WITH PASSWORD 'travisSuperSekret6645';" -U postgres
- psql -c 'GRANT ALL PRIVILEGES ON DATABASE travisci TO travisci;' -U postgres
- psql -c 'ALTER USER travisci CREATEDB;' -U postgres
before_install:
- cd api/tacticalrmm
install:
- pip install --no-cache-dir --upgrade pip
- pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
- pip install --no-cache-dir -r requirements.txt -r requirements-test.txt
script:
- coverage run manage.py test -v 2
after_success:
- coveralls

.vscode/launch.json

@@ -14,6 +14,20 @@
"0.0.0.0:8000"
],
"django": true
},
{
"name": "Django: Docker Remote Attach",
"type": "python",
"request": "attach",
"port": 5678,
"host": "localhost",
"preLaunchTask": "docker debug",
"pathMappings": [
{
"localRoot": "${workspaceFolder}/api/tacticalrmm",
"remoteRoot": "/workspace/api/tacticalrmm"
}
]
}
]
}

.vscode/settings.json

@@ -2,8 +2,15 @@
"python.pythonPath": "api/tacticalrmm/env/bin/python",
"python.languageServer": "Pylance",
"python.analysis.extraPaths": [
"api/tacticalrmm"
"api/tacticalrmm",
"api/env",
],
"python.analysis.diagnosticSeverityOverrides": {
"reportUnusedImport": "error",
"reportDuplicateImport": "error",
},
"python.analysis.memory.keepLibraryAst": true,
"python.linting.mypyEnabled": true,
"python.analysis.typeCheckingMode": "basic",
"python.formatting.provider": "black",
"editor.formatOnSave": true,
@@ -41,4 +48,23 @@
"**/*.zip": true
},
},
"go.useLanguageServer": true,
"[go]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": false,
},
"editor.snippetSuggestions": "none",
},
"[go.mod]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true,
},
},
"gopls": {
"usePlaceholders": true,
"completeUnimported": true,
"staticcheck": true,
}
}

.vscode/tasks.json

@@ -0,0 +1,23 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "docker debug",
"type": "shell",
"command": "docker-compose",
"args": [
"-p",
"trmm",
"-f",
".devcontainer/docker-compose.yml",
"-f",
".devcontainer/docker-compose.debug.yml",
"up",
"-d",
"--build"
]
}
]
}

README.md

@@ -1,21 +1,20 @@
# Tactical RMM
[![Build Status](https://travis-ci.com/wh1te909/tacticalrmm.svg?branch=develop)](https://travis-ci.com/wh1te909/tacticalrmm)
[![Build Status](https://dev.azure.com/dcparsi/Tactical%20RMM/_apis/build/status/wh1te909.tacticalrmm?branchName=develop)](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
[![Coverage Status](https://coveralls.io/repos/github/wh1te909/tacticalrmm/badge.png?branch=develop&kill_cache=1)](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
# [LIVE DEMO](https://rmm.xlawgaming.com/)
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*
### [Discord Chat](https://discord.gg/upGTkWp)
### [Documentation](https://wh1te909.github.io/tacticalrmm/)
## Features
- Teamviewer-like remote desktop control
@@ -34,109 +33,6 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
- Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019
## Installation
## Installation / Backup / Restore / Usage
### Requirements
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
- A domain you own with at least 3 subdomains
- Google Authenticator app (2 factor is NOT optional)
### Docker
Refer to the [docker setup](docker/readme.md)
### Installation example (Ubuntu server 20.04 LTS)
Fresh VPS with latest updates\
log in as root, create a user, and add it to the sudoers group (we will be creating a user called tactical)
```
apt update && apt -y upgrade
adduser tactical
usermod -a -G sudo tactical
```
switch to the tactical user and set up the firewall
```
su - tactical
sudo ufw default deny incoming
sudo ufw default allow outgoing
sudo ufw allow ssh
sudo ufw allow http
sudo ufw allow https
sudo ufw allow proto tcp from any to any port 4505,4506
sudo ufw enable && sudo ufw reload
```
Our domain for this example is tacticalrmm.com
In the DNS manager of wherever our domain is hosted, we will create three A records, all pointing to the public IP address of our VPS
Create A record ```api.tacticalrmm.com``` for the django rest backend\
Create A record ```rmm.tacticalrmm.com``` for the vue frontend\
Create A record ```mesh.tacticalrmm.com``` for meshcentral
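One way to confirm the records have propagated before running the installer is to query each one, for example with dig (using our example domain):
```
dig +short api.tacticalrmm.com
dig +short rmm.tacticalrmm.com
dig +short mesh.tacticalrmm.com
```
Each command should return the public IP address of the VPS.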
Download the install script and run it
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/install.sh
chmod +x install.sh
./install.sh
```
Links will be provided at the end of the install script.\
Download the executable from the first link, then open ```rmm.tacticalrmm.com``` and log in.\
Upload the executable when prompted on the initial setup page.
### Install an agent
From the app's dashboard, choose Agents > Install Agent to generate an installer.
## Updating
Download and run [update.sh](./update.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh))
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh
chmod +x update.sh
./update.sh
```
## Backup
Download [backup.sh](./backup.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh))
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh
```
Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
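If you're unsure where those values live, one quick way to view them (a sketch; adjust the path if your install differs) is:
```
grep -A 10 "DATABASES" /rmm/api/tacticalrmm/tacticalrmm/local_settings.py
```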
Run it
```
chmod +x backup.sh
./backup.sh
```
## Restore
Change your 3 A records to point to the new server's public IP
Create the same Linux user account as on the old server, add it to the sudo group, and set up the firewall (see install instructions above)
Copy the backup file to the new server
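For example, from the old server (the destination IP below is a placeholder):
```
scp rmm-backup-xxxxxxx.tar tactical@<new-server-ip>:/home/tactical/
```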
Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/restore.sh
```
Run the restore script, passing it the backup tar file as the first argument
```
chmod +x restore.sh
./restore.sh rmm-backup-xxxxxxx.tar
```
## Using another ssl certificate
During the install you can opt out of using the Let's Encrypt certificate. If you do this the script will create a self-signed certificate, so that https continues to work. You can replace the certificates in /certs/example.com/(privkey.pem | pubkey.pem) with your own.
If you are migrating from Let's Encrypt to another certificate provider, you can create the /certs directory and copy your certificates there. This is recommended because that directory will be backed up by the provided backup script. Then modify the nginx configurations to use your new certificates.
The cert that is generated is a wildcard certificate and is used in the nginx configurations: rmm.conf, api.conf, and mesh.conf. If you can't generate wildcard certificates, you can create a cert for each subdomain and configure each nginx configuration file to use its own certificate. Then restart nginx:
```
sudo systemctl restart nginx
```
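As a rough sketch (file names and paths below are examples; adjust them to your setup), switching a config to your own certificate and reloading could look like:
```
# edit the server block to point ssl_certificate / ssl_certificate_key at your cert files
sudo nano /etc/nginx/sites-available/rmm.conf
sudo nginx -t && sudo systemctl restart nginx
```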
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)

View File

@@ -1,457 +0,0 @@
from __future__ import absolute_import
import psutil
import os
import datetime
import zlib
import json
import base64
import wmi
import win32evtlog
import win32con
import win32evtlogutil
import winerror
from time import sleep
import requests
import subprocess
import random
import platform
ARCH = "64" if platform.machine().endswith("64") else "32"
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
SYS_DRIVE = os.environ["SystemDrive"]
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
def get_services():
# see https://github.com/wh1te909/tacticalrmm/issues/38
# for why I am manually implementing the svc.as_dict() method of psutil
ret = []
for svc in psutil.win_service_iter():
i = {}
try:
i["display_name"] = svc.display_name()
i["binpath"] = svc.binpath()
i["username"] = svc.username()
i["start_type"] = svc.start_type()
i["status"] = svc.status()
i["pid"] = svc.pid()
i["name"] = svc.name()
i["description"] = svc.description()
except Exception:
continue
else:
ret.append(i)
return ret
def run_python_script(filename, timeout, script_type="userdefined"):
# no longer used as of agent version 0.11.0
file_path = os.path.join(TEMP_DIR, filename)
if os.path.exists(file_path):
try:
os.remove(file_path)
except:
pass
if script_type == "userdefined":
__salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
else:
__salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)
return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
def run_script(filepath, filename, shell, timeout, args=[], bg=False):
if shell == "powershell" or shell == "cmd":
if args:
return __salt__["cmd.script"](
source=filepath,
args=" ".join(map(lambda x: f'"{x}"', args)),
shell=shell,
timeout=timeout,
bg=bg,
)
else:
return __salt__["cmd.script"](
source=filepath, shell=shell, timeout=timeout, bg=bg
)
elif shell == "python":
file_path = os.path.join(TEMP_DIR, filename)
if os.path.exists(file_path):
try:
os.remove(file_path)
except:
pass
__salt__["cp.get_file"](filepath, file_path)
salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"
if args:
a = " ".join(map(lambda x: f'"{x}"', args))
cmd = f"{PY_BIN} {file_path} {a}"
return __salt__[salt_cmd](cmd, timeout=timeout)
else:
return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)
def uninstall_agent():
remove_exe = os.path.join(PROGRAM_DIR, "unins000.exe")
__salt__["cmd.run_bg"]([remove_exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
return "ok"
def update_salt():
for p in psutil.process_iter():
with p.oneshot():
if p.name() == "tacticalrmm.exe" and "updatesalt" in p.cmdline():
return "running"
from subprocess import Popen, PIPE
CREATE_NEW_PROCESS_GROUP = 0x00000200
DETACHED_PROCESS = 0x00000008
cmd = [TAC_RMM, "-m", "updatesalt"]
p = Popen(
cmd,
stdin=PIPE,
stdout=PIPE,
stderr=PIPE,
close_fds=True,
creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
)
return p.pid
def run_manual_checks():
__salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
return "ok"
def install_updates():
for p in psutil.process_iter():
with p.oneshot():
if p.name() == "tacticalrmm.exe" and "winupdater" in p.cmdline():
return "running"
return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
def _wait_for_service(svc, status, retries=10):
attempts = 0
while 1:
try:
service = psutil.win_service_get(svc)
except psutil.NoSuchProcess:
stat = "fail"
attempts += 1
sleep(5)
else:
stat = service.status()
if stat != status:
attempts += 1
sleep(5)
else:
attempts = 0
if attempts == 0 or attempts > retries:
break
return stat
def agent_update_v2(inno, url):
# make sure another instance of the update is not running
# this function spawns 2 instances of itself (because we call it twice with salt run_bg)
# so if more than 2 running, don't continue as an update is already running
count = 0
for p in psutil.process_iter():
try:
with p.oneshot():
if "win_agent.agent_update_v2" in p.cmdline():
count += 1
except Exception:
continue
if count > 2:
return "already running"
sleep(random.randint(1, 20)) # don't flood the rmm
exe = os.path.join(TEMP_DIR, inno)
if os.path.exists(exe):
try:
os.remove(exe)
except:
pass
try:
r = requests.get(url, stream=True, timeout=600)
except Exception:
return "failed"
if r.status_code != 200:
return "failed"
with open(exe, "wb") as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
del r
ret = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)
tac = _wait_for_service(svc="tacticalagent", status="running")
if tac != "running":
subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)
chk = _wait_for_service(svc="checkrunner", status="running")
if chk != "running":
subprocess.run([NSSM, "start", "checkrunner"], timeout=30)
return "ok"
def do_agent_update_v2(inno, url):
return __salt__["cmd.run_bg"](
[
SALT_CALL,
"win_agent.agent_update_v2",
f"inno={inno}",
f"url={url}",
"--local",
]
)
def agent_update(version, url):
# make sure another instance of the update is not running
# this function spawns 2 instances of itself so if more than 2 running,
# don't continue as an update is already running
count = 0
for p in psutil.process_iter():
try:
with p.oneshot():
if "win_agent.agent_update" in p.cmdline():
count += 1
except Exception:
continue
if count > 2:
return "already running"
sleep(random.randint(1, 60)) # don't flood the rmm
try:
r = requests.get(url, stream=True, timeout=600)
except Exception:
return "failed"
if r.status_code != 200:
return "failed"
exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")
with open(exe, "wb") as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
del r
services = ("tacticalagent", "checkrunner")
for svc in services:
subprocess.run([NSSM, "stop", svc], timeout=120)
sleep(10)
r = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
sleep(30)
for svc in services:
subprocess.run([NSSM, "start", svc], timeout=120)
return "ok"
def do_agent_update(version, url):
return __salt__["cmd.run_bg"](
[
SALT_CALL,
"win_agent.agent_update",
f"version={version}",
f"url={url}",
"--local",
]
)
class SystemDetail:
def __init__(self):
self.c = wmi.WMI()
self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
self.comp_sys = self.c.Win32_ComputerSystem()
self.memory = self.c.Win32_PhysicalMemory()
self.os = self.c.Win32_OperatingSystem()
self.base_board = self.c.Win32_BaseBoard()
self.bios = self.c.Win32_BIOS()
self.disk = self.c.Win32_DiskDrive()
self.network_adapter = self.c.Win32_NetworkAdapter()
self.network_config = self.c.Win32_NetworkAdapterConfiguration()
self.desktop_monitor = self.c.Win32_DesktopMonitor()
self.cpu = self.c.Win32_Processor()
self.usb = self.c.Win32_USBController()
def get_all(self, obj):
ret = []
for i in obj:
tmp = [
{j: getattr(i, j)}
for j in list(i.properties)
if getattr(i, j) is not None
]
ret.append(tmp)
return ret
def system_info():
info = SystemDetail()
return {
"comp_sys_prod": info.get_all(info.comp_sys_prod),
"comp_sys": info.get_all(info.comp_sys),
"mem": info.get_all(info.memory),
"os": info.get_all(info.os),
"base_board": info.get_all(info.base_board),
"bios": info.get_all(info.bios),
"disk": info.get_all(info.disk),
"network_adapter": info.get_all(info.network_adapter),
"network_config": info.get_all(info.network_config),
"desktop_monitor": info.get_all(info.desktop_monitor),
"cpu": info.get_all(info.cpu),
"usb": info.get_all(info.usb),
}
def local_sys_info():
return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
def get_procs():
ret = []
# setup
for proc in psutil.process_iter():
with proc.oneshot():
proc.cpu_percent(interval=None)
# need time for psutil to record cpu percent
sleep(1)
for c, proc in enumerate(psutil.process_iter(), 1):
x = {}
with proc.oneshot():
if proc.pid == 0 or not proc.name():
continue
x["name"] = proc.name()
x["cpu_percent"] = proc.cpu_percent(interval=None) / psutil.cpu_count()
x["memory_percent"] = proc.memory_percent()
x["pid"] = proc.pid
x["ppid"] = proc.ppid()
x["status"] = proc.status()
x["username"] = proc.username()
x["id"] = c
ret.append(x)
return ret
def _compress_json(j):
return {
"wineventlog": base64.b64encode(
zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
).decode("ascii", errors="ignore")
}
def get_eventlog(logtype, last_n_days):
start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ
status_dict = {
win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
win32con.EVENTLOG_WARNING_TYPE: "WARNING",
win32con.EVENTLOG_ERROR_TYPE: "ERROR",
0: "INFO",
}
computer = "localhost"
hand = win32evtlog.OpenEventLog(computer, logtype)
total = win32evtlog.GetNumberOfEventLogRecords(hand)
log = []
uid = 0
done = False
try:
while 1:
events = win32evtlog.ReadEventLog(hand, flags, 0)
for ev_obj in events:
uid += 1
# return once the total number of events is reached or we'll be stuck in an infinite loop
if uid >= total:
done = True
break
the_time = ev_obj.TimeGenerated.Format()
time_obj = datetime.datetime.strptime(the_time, "%c")
if time_obj < start_time:
done = True
break
computer = str(ev_obj.ComputerName)
src = str(ev_obj.SourceName)
evt_type = str(status_dict[ev_obj.EventType])
evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
evt_category = str(ev_obj.EventCategory)
record = str(ev_obj.RecordNumber)
msg = (
str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
.replace("<", "")
.replace(">", "")
)
event_dict = {
"computer": computer,
"source": src,
"eventType": evt_type,
"eventID": evt_id,
"eventCategory": evt_category,
"message": msg,
"time": the_time,
"record": record,
"uid": uid,
}
log.append(event_dict)
if done:
break
except Exception:
pass
win32evtlog.CloseEventLog(hand)
return _compress_json(log)

View File

@@ -20,6 +20,5 @@ omit =
*/urls.py
*/tests.py
*/test.py
api/*.py
checks/utils.py

View File

@@ -1,5 +1,4 @@
from django.contrib import admin
from rest_framework.authtoken.admin import TokenAdmin
from .models import User

View File

@@ -1,6 +1,5 @@
from django.utils import timezone as djangotime
from django.core.management.base import BaseCommand
from django.utils import timezone as djangotime
from knox.models import AuthToken

View File

@@ -1,11 +1,13 @@
import pyotp
import subprocess
import pyotp
from django.core.management.base import BaseCommand
from accounts.models import User
class Command(BaseCommand):
help = "Generates barcode for Google Authenticator and creates totp for user"
help = "Generates barcode for Authenticator and creates totp for user"
def add_arguments(self, parser):
parser.add_argument("code", type=str)
@@ -24,12 +26,10 @@ class Command(BaseCommand):
url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
subprocess.run(f'qr "{url}"', shell=True)
self.stdout.write(
self.style.SUCCESS(
"Scan the barcode above with your google authenticator app"
)
self.style.SUCCESS("Scan the barcode above with your authenticator app")
)
self.stdout.write(
self.style.SUCCESS(
f"If that doesn't work you may manually enter the key: {code}"
f"If that doesn't work you may manually enter the setup key: {code}"
)
)

View File

@@ -0,0 +1,57 @@
import os
import subprocess
import pyotp
from django.core.management.base import BaseCommand
from accounts.models import User
class Command(BaseCommand):
help = "Reset 2fa"
def add_arguments(self, parser):
parser.add_argument("username", type=str)
def handle(self, *args, **kwargs):
username = kwargs["username"]
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
return
domain = "Tactical RMM"
nginx = "/etc/nginx/sites-available/frontend.conf"
found = None
if os.path.exists(nginx):
try:
with open(nginx, "r") as f:
for line in f:
if "server_name" in line:
found = line
break
if found:
rep = found.replace("server_name", "").replace(";", "")
domain = "".join(rep.split())
except:
pass
code = pyotp.random_base32()
user.totp_key = code
user.save(update_fields=["totp_key"])
url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain)
subprocess.run(f'qr "{url}"', shell=True)
self.stdout.write(
self.style.WARNING("Scan the barcode above with your authenticator app")
)
self.stdout.write(
self.style.WARNING(
f"If that doesn't work you may manually enter the setup key: {code}"
)
)
self.stdout.write(
self.style.SUCCESS(f"2fa was successfully reset for user {username}")
)
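A hedged usage note: assuming this management command file is saved as reset_2fa.py (the file name isn't shown here), it would be run from the Django project directory like:
```
python manage.py reset_2fa <username>
```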

View File

@@ -0,0 +1,22 @@
from django.core.management.base import BaseCommand
from accounts.models import User
class Command(BaseCommand):
help = "Reset password for user"
def add_arguments(self, parser):
parser.add_argument("username", type=str)
def handle(self, *args, **kwargs):
username = kwargs["username"]
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
self.stdout.write(self.style.ERROR(f"User {username} doesn't exist"))
return
passwd = input("Enter new password: ")
user.set_password(passwd)
user.save()
self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!"))

View File

@@ -2,8 +2,8 @@
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@@ -6,28 +6,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_auto_20200810_0544'),
("accounts", "0002_auto_20200810_0544"),
]
operations = [
migrations.AddField(
model_name='user',
name='created_by',
model_name="user",
name="created_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='created_time',
model_name="user",
name="created_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='user',
name='modified_by',
model_name="user",
name="modified_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='modified_time',
model_name="user",
name="modified_time",
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -6,24 +6,24 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0003_auto_20200922_1344'),
("accounts", "0003_auto_20200922_1344"),
]
operations = [
migrations.RemoveField(
model_name='user',
name='created_by',
model_name="user",
name="created_by",
),
migrations.RemoveField(
model_name='user',
name='created_time',
model_name="user",
name="created_time",
),
migrations.RemoveField(
model_name='user',
name='modified_by',
model_name="user",
name="modified_by",
),
migrations.RemoveField(
model_name='user',
name='modified_time',
model_name="user",
name="modified_time",
),
]

View File

@@ -6,28 +6,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0004_auto_20201002_1257'),
("accounts", "0004_auto_20201002_1257"),
]
operations = [
migrations.AddField(
model_name='user',
name='created_by',
model_name="user",
name="created_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='created_time',
model_name="user",
name="created_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='user',
name='modified_by',
model_name="user",
name="modified_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='modified_time',
model_name="user",
name="modified_time",
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -1,7 +1,7 @@
# Generated by Django 3.1.2 on 2020-11-10 20:24
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0007_update_agent_primary_key'),
("accounts", "0007_update_agent_primary_key"),
]
operations = [
migrations.AddField(
model_name='user',
name='dark_mode',
model_name="user",
name="dark_mode",
field=models.BooleanField(default=True),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2020-12-10 17:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0008_user_dark_mode"),
]
operations = [
migrations.AddField(
model_name="user",
name="show_community_scripts",
field=models.BooleanField(default=True),
),
]

View File

@@ -0,0 +1,26 @@
# Generated by Django 3.1.4 on 2021-01-14 01:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0009_user_show_community_scripts"),
]
operations = [
migrations.AddField(
model_name="user",
name="agent_dblclick_action",
field=models.CharField(
choices=[
("editagent", "Edit Agent"),
("takecontrol", "Take Control"),
("remotebg", "Remote Background"),
],
default="editagent",
max_length=50,
),
),
]

View File

@@ -0,0 +1,26 @@
# Generated by Django 3.1.5 on 2021-01-18 09:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0010_user_agent_dblclick_action"),
]
operations = [
migrations.AddField(
model_name="user",
name="default_agent_tbl_tab",
field=models.CharField(
choices=[
("server", "Servers"),
("workstation", "Workstations"),
("mixed", "Mixed"),
],
default="server",
max_length=50,
),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-02-28 06:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0011_user_default_agent_tbl_tab'),
]
operations = [
migrations.AddField(
model_name='user',
name='agents_per_page',
field=models.PositiveIntegerField(default=50),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-03-09 02:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0012_user_agents_per_page'),
]
operations = [
migrations.AddField(
model_name='user',
name='client_tree_sort',
field=models.CharField(choices=[('alphafail', 'Move failing clients to the top'), ('alpha', 'Sort alphabetically')], default='alphafail', max_length=50),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2 on 2021-04-11 01:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0013_user_client_tree_sort'),
]
operations = [
migrations.AddField(
model_name='user',
name='client_tree_splitter',
field=models.PositiveIntegerField(default=11),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2 on 2021-04-11 03:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0014_user_client_tree_splitter'),
]
operations = [
migrations.AddField(
model_name='user',
name='loading_bar_color',
field=models.CharField(default='red', max_length=255),
),
]

View File

@@ -1,13 +1,43 @@
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.db import models
from logs.models import BaseAuditModel
AGENT_DBLCLICK_CHOICES = [
("editagent", "Edit Agent"),
("takecontrol", "Take Control"),
("remotebg", "Remote Background"),
]
AGENT_TBL_TAB_CHOICES = [
("server", "Servers"),
("workstation", "Workstations"),
("mixed", "Mixed"),
]
CLIENT_TREE_SORT_CHOICES = [
("alphafail", "Move failing clients to the top"),
("alpha", "Sort alphabetically"),
]
class User(AbstractUser, BaseAuditModel):
is_active = models.BooleanField(default=True)
totp_key = models.CharField(max_length=50, null=True, blank=True)
dark_mode = models.BooleanField(default=True)
show_community_scripts = models.BooleanField(default=True)
agent_dblclick_action = models.CharField(
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
)
default_agent_tbl_tab = models.CharField(
max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
)
agents_per_page = models.PositiveIntegerField(default=50) # not currently used
client_tree_sort = models.CharField(
max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail"
)
client_tree_splitter = models.PositiveIntegerField(default=11)
loading_bar_color = models.CharField(max_length=255, default="red")
agent = models.OneToOneField(
"agents.Agent",

View File

@@ -1,13 +1,23 @@
import pyotp
from rest_framework.serializers import (
ModelSerializer,
SerializerMethodField,
)
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from .models import User
class UserUISerializer(ModelSerializer):
class Meta:
model = User
fields = [
"dark_mode",
"show_community_scripts",
"agent_dblclick_action",
"default_agent_tbl_tab",
"client_tree_sort",
"client_tree_splitter",
"loading_bar_color",
]
class UserSerializer(ModelSerializer):
class Meta:
model = User

View File

@@ -1,8 +1,9 @@
from unittest.mock import patch
from django.test import override_settings
from tacticalrmm.test import TacticalTestCase
from accounts.models import User
from tacticalrmm.test import TacticalTestCase
class TestAccounts(TacticalTestCase):
@@ -155,6 +156,33 @@ class GetUpdateDeleteUser(TacticalTestCase):
self.check_not_authenticated("put", url)
@override_settings(ROOT_USER="john")
def test_put_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
data = {
"id": self.john.pk,
"username": "john",
"email": "johndoe@xlawgaming.com",
"first_name": "John",
"last_name": "Doe",
}
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
@override_settings(ROOT_USER="john")
def test_put_not_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
data = {
"id": self.john.pk,
"username": "john",
"email": "johndoe@xlawgaming.com",
"first_name": "John",
"last_name": "Doe",
}
self.client.force_authenticate(user=self.alice)
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 400)
def test_delete(self):
url = f"/accounts/{self.john.pk}/users/"
r = self.client.delete(url)
@@ -166,6 +194,19 @@ class GetUpdateDeleteUser(TacticalTestCase):
self.check_not_authenticated("delete", url)
@override_settings(ROOT_USER="john")
def test_delete_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
r = self.client.delete(url)
self.assertEqual(r.status_code, 200)
@override_settings(ROOT_USER="john")
def test_delete_non_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
self.client.force_authenticate(user=self.alice)
r = self.client.delete(url)
self.assertEqual(r.status_code, 400)
class TestUserAction(TacticalTestCase):
def setUp(self):
@@ -184,6 +225,21 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("post", url)
@override_settings(ROOT_USER="john")
def test_post_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
@override_settings(ROOT_USER="john")
def test_post_non_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
self.client.force_authenticate(user=self.alice)
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
def test_put(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk}
@@ -195,9 +251,36 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("put", url)
def test_darkmode(self):
@override_settings(ROOT_USER="john")
def test_put_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk}
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
user = User.objects.get(pk=self.john.pk)
self.assertEqual(user.totp_key, "")
@override_settings(ROOT_USER="john")
def test_put_non_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk}
self.client.force_authenticate(user=self.alice)
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 400)
def test_user_ui(self):
url = "/accounts/users/ui/"
data = {"dark_mode": False}
data = {
"dark_mode": True,
"show_community_scripts": True,
"agent_dblclick_action": "editagent",
"default_agent_tbl_tab": "mixed",
"client_tree_sort": "alpha",
"client_tree_splitter": 14,
"loading_bar_color": "green",
}
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)

View File

@@ -1,4 +1,5 @@
from django.urls import path
from . import views
urlpatterns = [

View File

@@ -1,23 +1,28 @@
import pyotp
from django.contrib.auth import login
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.contrib.auth import login
from django.db import IntegrityError
from rest_framework.views import APIView
from rest_framework.authtoken.serializers import AuthTokenSerializer
from django.shortcuts import get_object_or_404
from knox.views import LoginView as KnoxLoginView
from rest_framework import status
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework import status
from rest_framework.views import APIView
from .models import User
from agents.models import Agent
from logs.models import AuditLog
from tacticalrmm.utils import notify_error
from .serializers import UserSerializer, TOTPSetupSerializer
from .models import User
from .serializers import TOTPSetupSerializer, UserSerializer, UserUISerializer
def _is_root_user(request, user) -> bool:
return (
hasattr(settings, "ROOT_USER")
and request.user != user
and user.username == settings.ROOT_USER
)
class CheckCreds(KnoxLoginView):
@@ -60,7 +65,7 @@ class LoginView(KnoxLoginView):
if settings.DEBUG and token == "sekret":
valid = True
elif totp.verify(token, valid_window=1):
elif totp.verify(token, valid_window=10):
valid = True
if valid:
@@ -81,7 +86,7 @@ class GetAddUsers(APIView):
def post(self, request):
# add new user
try:
user = User.objects.create_user(
user = User.objects.create_user( # type: ignore
request.data["username"],
request.data["email"],
request.data["password"],
@@ -108,6 +113,9 @@ class GetUpdateDeleteUser(APIView):
def put(self, request, pk):
user = get_object_or_404(User, pk=pk)
if _is_root_user(request, user):
return notify_error("The root user cannot be modified from the UI")
serializer = UserSerializer(instance=user, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
@@ -115,7 +123,11 @@ class GetUpdateDeleteUser(APIView):
return Response("ok")
def delete(self, request, pk):
get_object_or_404(User, pk=pk).delete()
user = get_object_or_404(User, pk=pk)
if _is_root_user(request, user):
return notify_error("The root user cannot be deleted from the UI")
user.delete()
return Response("ok")
@@ -124,8 +136,10 @@ class UserActions(APIView):
# reset password
def post(self, request):
user = get_object_or_404(User, pk=request.data["id"])
if _is_root_user(request, user):
return notify_error("The root user cannot be modified from the UI")
user.set_password(request.data["password"])
user.save()
@@ -133,8 +147,10 @@ class UserActions(APIView):
# reset two factor token
def put(self, request):
user = get_object_or_404(User, pk=request.data["id"])
if _is_root_user(request, user):
return notify_error("The root user cannot be modified from the UI")
user.totp_key = ""
user.save()
@@ -160,7 +176,9 @@ class TOTPSetup(APIView):
class UserUI(APIView):
def patch(self, request):
user = request.user
user.dark_mode = request.data["dark_mode"]
user.save(update_fields=["dark_mode"])
return Response("ok")
serializer = UserUISerializer(
instance=request.user, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("ok")

View File

@@ -1,8 +1,8 @@
from django.contrib import admin
from .models import Agent, AgentOutage, RecoveryAction, Note
from .models import Agent, AgentCustomField, Note, RecoveryAction
admin.site.register(Agent)
admin.site.register(AgentOutage)
admin.site.register(RecoveryAction)
admin.site.register(Note)
admin.site.register(AgentCustomField)

View File

@@ -1,14 +1,12 @@
import json
import os
import random
import string
import os
import json
from model_bakery.recipe import Recipe, seq
from itertools import cycle
from django.utils import timezone as djangotime
from django.conf import settings
from .models import Agent
from django.conf import settings
from django.utils import timezone as djangotime
from model_bakery.recipe import Recipe, foreign_key, seq
def generate_agent_id(hostname):
@@ -16,6 +14,9 @@ def generate_agent_id(hostname):
return f"{rand}-{hostname}"
site = Recipe("clients.Site")
def get_wmi_data():
with open(
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
@@ -24,11 +25,12 @@ def get_wmi_data():
agent = Recipe(
Agent,
"agents.Agent",
site=foreign_key(site),
hostname="DESKTOP-TEST123",
version="1.3.0",
monitoring_type=cycle(["workstation", "server"]),
salt_id=generate_agent_id("DESKTOP-TEST123"),
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
agent_id=seq("asdkj3h4234-1234hg3h4g34-234jjh34|DESKTOP-TEST123"),
)
server_agent = agent.extend(
@@ -41,8 +43,12 @@ workstation_agent = agent.extend(
online_agent = agent.extend(last_seen=djangotime.now())
offline_agent = agent.extend(
last_seen=djangotime.now() - djangotime.timedelta(minutes=7)
)
overdue_agent = agent.extend(
last_seen=djangotime.now() - djangotime.timedelta(minutes=6)
last_seen=djangotime.now() - djangotime.timedelta(minutes=35)
)
agent_with_services = agent.extend(

View File

@@ -0,0 +1,93 @@
from django.core.management.base import BaseCommand
from agents.models import Agent
from clients.models import Client, Site
class Command(BaseCommand):
help = "Bulk update agent offline/overdue time"
def add_arguments(self, parser):
parser.add_argument("time", type=int, help="Time in minutes")
parser.add_argument(
"--client",
type=str,
help="Client Name",
)
parser.add_argument(
"--site",
type=str,
help="Site Name",
)
parser.add_argument(
"--offline",
action="store_true",
help="Offline",
)
parser.add_argument(
"--overdue",
action="store_true",
help="Overdue",
)
parser.add_argument(
"--all",
action="store_true",
help="All agents",
)
def handle(self, *args, **kwargs):
time = kwargs["time"]
client_name = kwargs["client"]
site_name = kwargs["site"]
all_agents = kwargs["all"]
offline = kwargs["offline"]
overdue = kwargs["overdue"]
agents = None
if offline and time < 2:
self.stdout.write(self.style.ERROR("Minimum offline time is 2 minutes"))
return
if overdue and time < 3:
self.stdout.write(self.style.ERROR("Minimum overdue time is 3 minutes"))
return
if client_name:
try:
client = Client.objects.get(name=client_name)
except Client.DoesNotExist:
self.stdout.write(
self.style.ERROR(f"Client {client_name} doesn't exist")
)
return
agents = Agent.objects.filter(site__client=client)
elif site_name:
try:
site = Site.objects.get(name=site_name)
except Site.DoesNotExist:
self.stdout.write(self.style.ERROR(f"Site {site_name} doesn't exist"))
return
agents = Agent.objects.filter(site=site)
elif all_agents:
agents = Agent.objects.all()
if agents:
if offline:
agents.update(offline_time=time)
self.stdout.write(
self.style.SUCCESS(
f"Changed offline time on {len(agents)} agents to {time} minutes"
)
)
if overdue:
agents.update(overdue_time=time)
self.stdout.write(
self.style.SUCCESS(
f"Changed overdue time on {len(agents)} agents to {time} minutes"
)
)
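A hedged usage sketch: the command's file name (and therefore the name passed to manage.py) isn't shown here, so <command_name> below is a placeholder; the positional time in minutes and the flags come from the arguments defined above:
```
python manage.py <command_name> 30 --all --overdue
python manage.py <command_name> 5 --client "Example Client" --offline
```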

View File

@@ -0,0 +1,18 @@
from django.conf import settings
from django.core.management.base import BaseCommand
from agents.models import Agent
class Command(BaseCommand):
help = "Shows online agents that are not on the latest version"
def handle(self, *args, **kwargs):
q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only(
"pk", "version", "last_seen", "overdue_time", "offline_time"
)
agents = [i for i in q if i.status == "online"]
for agent in agents:
self.stdout.write(
self.style.SUCCESS(f"{agent.hostname} - v{agent.version}")
)

View File

@@ -1,8 +1,8 @@
# Generated by Django 3.0.6 on 2020-05-31 01:23
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@@ -1,7 +1,7 @@
# Generated by Django 3.0.7 on 2020-06-09 16:07
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@@ -1,7 +1,7 @@
# Generated by Django 3.0.8 on 2020-08-09 05:31
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):

View File

@@ -1,8 +1,8 @@
# Generated by Django 3.1.1 on 2020-09-22 20:57
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):

View File

@@ -1,20 +1,26 @@
# Generated by Django 3.1.2 on 2020-11-01 22:53
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('clients', '0006_deployment'),
('agents', '0020_auto_20201025_2129'),
("clients", "0006_deployment"),
("agents", "0020_auto_20201025_2129"),
]
operations = [
migrations.AddField(
model_name='agent',
name='site_link',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'),
model_name="agent",
name="site_link",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="agents",
to="clients.site",
),
),
]

View File

@@ -6,16 +6,16 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0022_update_site_primary_key'),
("agents", "0022_update_site_primary_key"),
]
operations = [
migrations.RemoveField(
model_name='agent',
name='client',
model_name="agent",
name="client",
),
migrations.RemoveField(
model_name='agent',
name='site',
model_name="agent",
name="site",
),
]

View File

@@ -6,13 +6,13 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0023_auto_20201101_2312'),
("agents", "0023_auto_20201101_2312"),
]
operations = [
migrations.RenameField(
model_name='agent',
old_name='site_link',
new_name='site',
model_name="agent",
old_name="site_link",
new_name="site",
),
]

View File

@@ -0,0 +1,27 @@
# Generated by Django 3.1.3 on 2020-11-22 04:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("agents", "0024_auto_20201101_2319"),
]
operations = [
migrations.AlterField(
model_name="recoveryaction",
name="mode",
field=models.CharField(
choices=[
("salt", "Salt"),
("mesh", "Mesh"),
("command", "Command"),
("rpc", "Nats RPC"),
],
default="mesh",
max_length=50,
),
),
]

View File

@@ -0,0 +1,28 @@
# Generated by Django 3.1.3 on 2020-11-25 23:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("agents", "0025_auto_20201122_0407"),
]
operations = [
migrations.AlterField(
model_name="recoveryaction",
name="mode",
field=models.CharField(
choices=[
("salt", "Salt"),
("mesh", "Mesh"),
("command", "Command"),
("rpc", "Nats RPC"),
("checkrunner", "Checkrunner"),
],
default="mesh",
max_length=50,
),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-29 21:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0026_auto_20201125_2334'),
]
operations = [
migrations.AddField(
model_name='agent',
name='overdue_dashboard_alert',
field=models.BooleanField(default=False),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.1.4 on 2021-02-06 15:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0027_agent_overdue_dashboard_alert'),
]
operations = [
migrations.AddField(
model_name='agentoutage',
name='outage_email_sent_time',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='agentoutage',
name='outage_sms_sent_time',
field=models.DateTimeField(blank=True, null=True),
),
]

View File

@@ -0,0 +1,16 @@
# Generated by Django 3.1.4 on 2021-02-10 21:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0028_auto_20210206_1534'),
]
operations = [
migrations.DeleteModel(
name='AgentOutage',
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.6 on 2021-02-16 08:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0029_delete_agentoutage'),
]
operations = [
migrations.AddField(
model_name='agent',
name='offline_time',
field=models.PositiveIntegerField(default=4),
),
]

View File

@@ -0,0 +1,20 @@
# Generated by Django 3.1.7 on 2021-03-04 03:57
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0006_auto_20210217_1736'),
('agents', '0030_agent_offline_time'),
]
operations = [
migrations.AddField(
model_name='agent',
name='alert_template',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='alerts.alerttemplate'),
),
]

View File

@@ -0,0 +1,24 @@
# Generated by Django 3.1.7 on 2021-03-17 14:45
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0014_customfield'),
('agents', '0031_agent_alert_template'),
]
operations = [
migrations.CreateModel(
name='AgentCustomField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.TextField(blank=True, null=True)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='agents.agent')),
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='agent_fields', to='core.customfield')),
],
),
]

View File

@@ -0,0 +1,19 @@
# Generated by Django 3.1.7 on 2021-03-29 02:51
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0032_agentcustomfield'),
]
operations = [
migrations.AddField(
model_name='agentcustomfield',
name='multiple_value',
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-03-29 03:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0033_agentcustomfield_multiple_value'),
]
operations = [
migrations.AddField(
model_name='agentcustomfield',
name='checkbox_value',
field=models.BooleanField(blank=True, default=False),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.1.7 on 2021-03-29 17:09
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0034_agentcustomfield_checkbox_value'),
]
operations = [
migrations.RenameField(
model_name='agentcustomfield',
old_name='checkbox_value',
new_name='bool_value',
),
migrations.RenameField(
model_name='agentcustomfield',
old_name='value',
new_name='string_value',
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-04-17 01:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0035_auto_20210329_1709'),
]
operations = [
migrations.AddField(
model_name='agent',
name='block_policy_inheritance',
field=models.BooleanField(default=False),
),
]

View File

@@ -1,25 +1,26 @@
import requests
import datetime as dt
import time
import asyncio
import base64
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Hash import SHA3_384
from Crypto.Util.Padding import pad
import validators
import random
import re
import string
import time
from collections import Counter
from loguru import logger
from packaging import version as pyver
from distutils.version import LooseVersion
from typing import Any
from django.db import models
import msgpack
import validators
from Crypto.Cipher import AES
from Crypto.Hash import SHA3_384
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils import timezone as djangotime
from loguru import logger
from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout
from core.models import CoreSettings, TZ_CHOICES
from core.models import TZ_CHOICES, CoreSettings
from logs.models import BaseAuditModel
logger.configure(**settings.LOG_CONFIG)
@@ -50,6 +51,8 @@ class Agent(BaseAuditModel):
mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
overdue_email_alert = models.BooleanField(default=False)
overdue_text_alert = models.BooleanField(default=False)
overdue_dashboard_alert = models.BooleanField(default=False)
offline_time = models.PositiveIntegerField(default=4)
overdue_time = models.PositiveIntegerField(default=30)
check_interval = models.PositiveIntegerField(default=120)
needs_reboot = models.BooleanField(default=False)
@@ -60,6 +63,14 @@ class Agent(BaseAuditModel):
max_length=255, choices=TZ_CHOICES, null=True, blank=True
)
maintenance_mode = models.BooleanField(default=False)
block_policy_inheritance = models.BooleanField(default=False)
alert_template = models.ForeignKey(
"alerts.AlertTemplate",
related_name="agents",
null=True,
blank=True,
on_delete=models.SET_NULL,
)
site = models.ForeignKey(
"clients.Site",
related_name="agents",
@@ -75,6 +86,24 @@ class Agent(BaseAuditModel):
on_delete=models.SET_NULL,
)
def save(self, *args, **kwargs):
# get old agent if exists
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
super(BaseAuditModel, self).save(*args, **kwargs)
# check if new agent has been created
# or if the policy has changed on the agent
# or if the site has changed on the agent, and if so, generate policies
if (
not old_agent
or (old_agent and old_agent.policy != self.policy)
or (old_agent.site != self.site)
or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
):
self.generate_checks_from_policies()
self.generate_tasks_from_policies()
def __str__(self):
return self.hostname
@@ -109,14 +138,6 @@ class Agent(BaseAuditModel):
return settings.DL_32
return None
@property
def winsalt_dl(self):
if self.arch == "64":
return settings.SALT_64
elif self.arch == "32":
return settings.SALT_32
return None
@property
def win_inno_exe(self):
if self.arch == "64":
@@ -127,7 +148,7 @@ class Agent(BaseAuditModel):
@property
def status(self):
offline = djangotime.now() - djangotime.timedelta(minutes=6)
offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time)
overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
if self.last_seen is not None:
@@ -142,31 +163,32 @@ class Agent(BaseAuditModel):
@property
def has_patches_pending(self):
if self.winupdates.filter(action="approve").filter(installed=False).exists():
return True
else:
return False
return self.winupdates.filter(action="approve").filter(installed=False).exists() # type: ignore
@property
def checks(self):
total, passing, failing = 0, 0, 0
total, passing, failing, warning, info = 0, 0, 0, 0, 0
if self.agentchecks.exists():
for i in self.agentchecks.all():
if self.agentchecks.exists(): # type: ignore
for i in self.agentchecks.all(): # type: ignore
total += 1
if i.status == "passing":
passing += 1
elif i.status == "failing":
failing += 1
has_failing_checks = True if failing > 0 else False
if i.alert_severity == "error":
failing += 1
elif i.alert_severity == "warning":
warning += 1
elif i.alert_severity == "info":
info += 1
ret = {
"total": total,
"passing": passing,
"failing": failing,
"has_failing_checks": has_failing_checks,
"warning": warning,
"info": info,
"has_failing_checks": failing > 0 or warning > 0,
}
return ret
@@ -181,6 +203,27 @@ class Agent(BaseAuditModel):
except:
return ["unknown cpu model"]
@property
def graphics(self):
ret, mrda = [], []
try:
graphics = self.wmi_detail["graphics"]
for i in graphics:
caption = [x["Caption"] for x in i if "Caption" in x][0]
if "microsoft remote display adapter" in caption.lower():
mrda.append("yes")
continue
ret.append([x["Caption"] for x in i if "Caption" in x][0])
# only return this if there are no other graphics cards
if not ret and mrda:
return "Microsoft Remote Display Adapter"
return ", ".join(ret)
except:
return "Graphics info requires agent v1.4.14"
@property
def local_ips(self):
ret = []
@@ -225,6 +268,7 @@ class Agent(BaseAuditModel):
pass
try:
comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
return [x["Version"] for x in comp_sys_prod if "Version" in x][0]
except:
pass
@@ -254,33 +298,107 @@ class Agent(BaseAuditModel):
except:
return ["unknown disk"]
def check_run_interval(self) -> int:
interval = self.check_interval
# determine if any agent checks have a custom interval and set the lowest interval
for check in self.agentchecks.filter(overriden_by_policy=False): # type: ignore
if check.run_interval and check.run_interval < interval:
# don't allow check run intervals of less than 15s
if check.run_interval < 15:
interval = 15
else:
interval = check.run_interval
return interval
def run_script(
self,
scriptpk: int,
args: list[str] = [],
timeout: int = 120,
full: bool = False,
wait: bool = False,
run_on_any: bool = False,
) -> Any:
from scripts.models import Script
script = Script.objects.get(pk=scriptpk)
parsed_args = script.parse_script_args(self, script.shell, args)
data = {
"func": "runscriptfull" if full else "runscript",
"timeout": timeout,
"script_args": parsed_args,
"payload": {
"code": script.code,
"shell": script.shell,
},
}
running_agent = self
if run_on_any:
nats_ping = {"func": "ping"}
# try on self first
r = asyncio.run(self.nats_cmd(nats_ping, timeout=1))
if r == "pong":
running_agent = self
else:
online = [
agent
for agent in Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
if agent.status == "online"
]
for agent in online:
r = asyncio.run(agent.nats_cmd(nats_ping, timeout=1))
if r == "pong":
running_agent = agent
break
if running_agent.pk == self.pk:
return "Unable to find an online agent"
if wait:
return asyncio.run(running_agent.nats_cmd(data, timeout=timeout, wait=True))
else:
asyncio.run(running_agent.nats_cmd(data, wait=False))
return "ok"
# auto approves updates
def approve_updates(self):
patch_policy = self.get_patch_policy()
updates = list()
if patch_policy.critical == "approve":
updates += self.winupdates.filter(
updates += self.winupdates.filter( # type: ignore
severity="Critical", installed=False
).exclude(action="approve")
if patch_policy.important == "approve":
updates += self.winupdates.filter(
updates += self.winupdates.filter( # type: ignore
severity="Important", installed=False
).exclude(action="approve")
if patch_policy.moderate == "approve":
updates += self.winupdates.filter(
updates += self.winupdates.filter( # type: ignore
severity="Moderate", installed=False
).exclude(action="approve")
if patch_policy.low == "approve":
updates += self.winupdates.filter(severity="Low", installed=False).exclude(
updates += self.winupdates.filter(severity="Low", installed=False).exclude( # type: ignore
action="approve"
)
if patch_policy.other == "approve":
updates += self.winupdates.filter(severity="", installed=False).exclude(
updates += self.winupdates.filter(severity="", installed=False).exclude( # type: ignore
action="approve"
)
@@ -295,7 +413,7 @@ class Agent(BaseAuditModel):
site = self.site
core_settings = CoreSettings.objects.first()
patch_policy = None
agent_policy = self.winupdatepolicy.get()
agent_policy = self.winupdatepolicy.get() # type: ignore
if self.monitoring_type == "server":
# check agent policy first which should override client or site policy
@@ -304,21 +422,34 @@ class Agent(BaseAuditModel):
# check site policy if agent policy doesn't have one
elif site.server_policy and site.server_policy.winupdatepolicy.exists():
patch_policy = site.server_policy.winupdatepolicy.get()
# make sure agent isn't blocking policy inheritance
if not self.block_policy_inheritance:
patch_policy = site.server_policy.winupdatepolicy.get()
# if site doesn't have a patch policy check the client
elif (
site.client.server_policy
and site.client.server_policy.winupdatepolicy.exists()
):
patch_policy = site.client.server_policy.winupdatepolicy.get()
# make sure agent and site are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
):
patch_policy = site.client.server_policy.winupdatepolicy.get()
# if patch policy still doesn't exist check default policy
elif (
core_settings.server_policy
and core_settings.server_policy.winupdatepolicy.exists()
):
patch_policy = core_settings.server_policy.winupdatepolicy.get()
# make sure agent site and client are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
and not site.client.block_policy_inheritance
):
patch_policy = core_settings.server_policy.winupdatepolicy.get()
elif self.monitoring_type == "workstation":
# check agent policy first which should override client or site policy
@@ -329,21 +460,36 @@ class Agent(BaseAuditModel):
site.workstation_policy
and site.workstation_policy.winupdatepolicy.exists()
):
patch_policy = site.workstation_policy.winupdatepolicy.get()
# make sure agent isn't blocking policy inheritance
if not self.block_policy_inheritance:
patch_policy = site.workstation_policy.winupdatepolicy.get()
# if site doesn't have a patch policy check the client
elif (
site.client.workstation_policy
and site.client.workstation_policy.winupdatepolicy.exists()
):
patch_policy = site.client.workstation_policy.winupdatepolicy.get()
# make sure agent and site are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
):
patch_policy = site.client.workstation_policy.winupdatepolicy.get()
# if patch policy still doesn't exist check default policy
elif (
core_settings.workstation_policy
and core_settings.workstation_policy.winupdatepolicy.exists()
):
patch_policy = core_settings.workstation_policy.winupdatepolicy.get()
# make sure agent site and client are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
and not site.client.block_policy_inheritance
):
patch_policy = (
core_settings.workstation_policy.winupdatepolicy.get()
)
# if policy still doesn't exist return the agent patch policy
if not patch_policy:
@@ -380,32 +526,162 @@ class Agent(BaseAuditModel):
return patch_policy
# clear is used to delete managed policy checks from agent
# parent_checks specifies a list of checks to delete from agent with matching parent_check field
def generate_checks_from_policies(self, clear=False):
def get_approved_update_guids(self) -> list[str]:
return list(
self.winupdates.filter(action="approve", installed=False).values_list( # type: ignore
"guid", flat=True
)
)
# sets alert template assigned in the following order: policy, site, client, global
# sets None if nothing is found
def set_alert_template(self):
site = self.site
client = self.client
core = CoreSettings.objects.first()
templates = list()
# check if alert template is on a policy assigned to agent
if (
self.policy
and self.policy.alert_template
and self.policy.alert_template.is_active
):
templates.append(self.policy.alert_template)
# check if policy with alert template is assigned to the site
if (
self.monitoring_type == "server"
and site.server_policy
and site.server_policy.alert_template
and site.server_policy.alert_template.is_active
and not self.block_policy_inheritance
):
templates.append(site.server_policy.alert_template)
if (
self.monitoring_type == "workstation"
and site.workstation_policy
and site.workstation_policy.alert_template
and site.workstation_policy.alert_template.is_active
and not self.block_policy_inheritance
):
templates.append(site.workstation_policy.alert_template)
# check if alert template is assigned to site
if site.alert_template and site.alert_template.is_active:
templates.append(site.alert_template)
# check if policy with alert template is assigned to the client
if (
self.monitoring_type == "server"
and client.server_policy
and client.server_policy.alert_template
and client.server_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
):
templates.append(client.server_policy.alert_template)
if (
self.monitoring_type == "workstation"
and client.workstation_policy
and client.workstation_policy.alert_template
and client.workstation_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
):
templates.append(client.workstation_policy.alert_template)
# check if alert template is on client and return
if (
client.alert_template
and client.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
):
templates.append(client.alert_template)
# check if alert template is applied globally and return
if (
core.alert_template
and core.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.alert_template)
# check if a policy with an alert template is assigned at the global (core) level
if (
self.monitoring_type == "server"
and core.server_policy
and core.server_policy.alert_template
and core.server_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.server_policy.alert_template)
if (
self.monitoring_type == "workstation"
and core.workstation_policy
and core.workstation_policy.alert_template
and core.workstation_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.workstation_policy.alert_template)
# go through the templates and return the first one that isn't excluded
for template in templates:
# check if client, site, or agent has been excluded from template
if (
client.pk
in template.excluded_clients.all().values_list("pk", flat=True)
or site.pk in template.excluded_sites.all().values_list("pk", flat=True)
or self.pk
in template.excluded_agents.all()
.only("pk")
.values_list("pk", flat=True)
):
continue
# check if template is excluding desktops
elif (
self.monitoring_type == "workstation" and template.exclude_workstations
):
continue
# check if template is excluding servers
elif self.monitoring_type == "server" and template.exclude_servers:
continue
else:
# save alert_template to agent cache field
self.alert_template = template
self.save()
return template
# no alert templates found or agent has been excluded
self.alert_template = None
self.save()
return None
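# --- Illustrative sketch (not from the repo): set_alert_template above collects
# candidate templates in priority order (agent policy, site policy, site, client
# policy, client, global) and returns the first one that does not exclude this
# agent, site, or client. Standalone version of that "first non-excluded
# candidate wins" loop:
from typing import Iterable, Optional

def first_applicable(candidates: Iterable[str], excluded: set) -> Optional[str]:
    for name in candidates:  # candidates are already ordered by priority
        if name not in excluded:
            return name
    return None

# the site template wins when the higher-priority policy template excludes us
assert first_applicable(["policy-tpl", "site-tpl", "global-tpl"], {"policy-tpl"}) == "site-tpl"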
def generate_checks_from_policies(self):
from automation.models import Policy
# Clear agent checks managed by policy
if clear:
self.agentchecks.filter(managed_by_policy=True).delete()
# Clear agent checks that have overriden_by_policy set
self.agentchecks.update(overriden_by_policy=False)
self.agentchecks.update(overriden_by_policy=False) # type: ignore
# Generate checks based on policies
Policy.generate_policy_checks(self)
# clear is used to delete managed policy tasks from agent
# parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
def generate_tasks_from_policies(self, clear=False):
from autotasks.tasks import delete_win_task_schedule
def generate_tasks_from_policies(self):
from automation.models import Policy
# Clear agent tasks managed by policy
if clear:
for task in self.autotasks.filter(managed_by_policy=True):
delete_win_task_schedule.delay(task.pk)
# Generate tasks based on policies
Policy.generate_policy_tasks(self)
@@ -433,76 +709,40 @@ class Agent(BaseAuditModel):
except Exception:
return "err"
def salt_api_cmd(self, **kwargs):
# salt should always timeout first before the requests' timeout
async def nats_cmd(self, data: dict, timeout: int = 30, wait: bool = True):
nc = NATS()
options = {
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"user": "tacticalrmm",
"password": settings.SECRET_KEY,
"connect_timeout": 3,
"max_reconnect_attempts": 2,
}
try:
timeout = kwargs["timeout"]
except KeyError:
# default timeout
timeout = 15
salt_timeout = 12
else:
if timeout < 8:
timeout = 8
salt_timeout = 5
await nc.connect(**options)
except:
return "natsdown"
if wait:
try:
msg = await nc.request(
self.agent_id, msgpack.dumps(data), timeout=timeout
)
except ErrTimeout:
ret = "timeout"
else:
salt_timeout = timeout - 3
try:
ret = msgpack.loads(msg.data) # type: ignore
except Exception as e:
logger.error(e)
ret = str(e)
json = {
"client": "local",
"tgt": self.salt_id,
"fun": kwargs["func"],
"timeout": salt_timeout,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
if "arg" in kwargs:
json.update({"arg": kwargs["arg"]})
if "kwargs" in kwargs:
json.update({"kwarg": kwargs["kwargs"]})
try:
resp = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[json],
timeout=timeout,
)
except Exception:
return "timeout"
try:
ret = resp.json()["return"][0][self.salt_id]
except Exception as e:
logger.error(f"{self.salt_id}: {e}")
return "error"
else:
await nc.close()
return ret
def salt_api_async(self, **kwargs):
json = {
"client": "local_async",
"tgt": self.salt_id,
"fun": kwargs["func"],
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
if "arg" in kwargs:
json.update({"arg": kwargs["arg"]})
if "kwargs" in kwargs:
json.update({"kwarg": kwargs["kwargs"]})
try:
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
except Exception:
return "timeout"
return resp
else:
await nc.publish(self.agent_id, msgpack.dumps(data))
await nc.flush()
await nc.close()
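# --- Editor's usage sketch: nats_cmd takes a payload dict and is awaited by its
# callers via asyncio.run (see the views further down, e.g. the ping endpoint).
# Assumes an existing Agent instance `agent` inside a configured tacticalrmm
# Django environment:
import asyncio

def ping_agent(agent) -> str:
    # returns "pong" on success, "timeout" if the agent never answers,
    # or "natsdown" if the NATS server itself is unreachable
    return asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))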
@staticmethod
def serialize(agent):
@@ -511,101 +751,18 @@ class Agent(BaseAuditModel):
ret = AgentEditSerializer(agent).data
del ret["all_timezones"]
del ret["client"]
return ret
@staticmethod
def salt_batch_async(**kwargs):
assert isinstance(kwargs["minions"], list)
json = {
"client": "local_async",
"tgt_type": "list",
"tgt": kwargs["minions"],
"fun": kwargs["func"],
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
if "arg" in kwargs:
json.update({"arg": kwargs["arg"]})
if "kwargs" in kwargs:
json.update({"kwarg": kwargs["kwargs"]})
try:
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
except Exception:
return "timeout"
return resp
def schedule_reboot(self, obj):
start_date = dt.datetime.strftime(obj, "%Y-%m-%d")
start_time = dt.datetime.strftime(obj, "%H:%M")
# let windows task scheduler automatically delete the task after it runs
end_obj = obj + dt.timedelta(minutes=15)
end_date = dt.datetime.strftime(end_obj, "%Y-%m-%d")
end_time = dt.datetime.strftime(end_obj, "%H:%M")
task_name = "TacticalRMM_SchedReboot_" + "".join(
random.choice(string.ascii_letters) for _ in range(10)
)
r = self.salt_api_cmd(
timeout=15,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Windows\\System32\\shutdown.exe"',
'arguments="/r /t 5 /f"',
"trigger_type=Once",
f'start_date="{start_date}"',
f'start_time="{start_time}"',
f'end_date="{end_date}"',
f'end_time="{end_time}"',
"ac_only=False",
"stop_if_on_batteries=False",
"delete_after=Immediately",
],
)
if r == "error" or (isinstance(r, bool) and not r):
return "failed"
elif r == "timeout":
return "timeout"
elif isinstance(r, bool) and r:
from logs.models import PendingAction
details = {
"taskname": task_name,
"time": str(obj),
}
PendingAction(agent=self, action_type="schedreboot", details=details).save()
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
return {"msg": {"time": nice_time, "agent": self.hostname}}
else:
return "failed"
def not_supported(self, version_added):
if pyver.parse(self.version) < pyver.parse(version_added):
return True
return False
def delete_superseded_updates(self):
try:
pks = [] # list of pks to delete
kbs = list(self.winupdates.values_list("kb", flat=True))
kbs = list(self.winupdates.values_list("kb", flat=True)) # type: ignore
d = Counter(kbs)
dupes = [k for k, v in d.items() if v > 1]
for dupe in dupes:
titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)
titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True) # type: ignore
# extract the version from the title and sort from oldest to newest
# skip if no version info is available therefore nothing to parse
try:
@@ -618,69 +775,42 @@ class Agent(BaseAuditModel):
continue
# append all but the latest version to our list of pks to delete
for ver in sorted_vers[:-1]:
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)
q = self.winupdates.filter(kb=dupe).filter(title__contains=ver) # type: ignore
pks.append(q.first().pk)
pks = list(set(pks))
self.winupdates.filter(pk__in=pks).delete()
self.winupdates.filter(pk__in=pks).delete() # type: ignore
except:
pass
# define how the agent should handle pending actions
def handle_pending_actions(self):
pending_actions = self.pendingactions.filter(status="pending")
for action in pending_actions:
if action.action_type == "taskaction":
from autotasks.tasks import (
create_win_task_schedule,
enable_or_disable_win_task,
delete_win_task_schedule,
def should_create_alert(self, alert_template=None):
return (
self.overdue_dashboard_alert
or self.overdue_email_alert
or self.overdue_text_alert
or (
alert_template
and (
alert_template.agent_always_alert
or alert_template.agent_always_email
or alert_template.agent_always_text
)
task_id = action.details["task_id"]
if action.details["action"] == "taskcreate":
create_win_task_schedule.delay(task_id, pending_action=action.id)
elif action.details["action"] == "tasktoggle":
enable_or_disable_win_task.delay(
task_id, action.details["value"], pending_action=action.id
)
elif action.details["action"] == "taskdelete":
delete_win_task_schedule.delay(task_id, pending_action=action.id)
class AgentOutage(models.Model):
agent = models.ForeignKey(
Agent,
related_name="agentoutages",
null=True,
blank=True,
on_delete=models.CASCADE,
)
outage_time = models.DateTimeField(auto_now_add=True)
recovery_time = models.DateTimeField(null=True, blank=True)
outage_email_sent = models.BooleanField(default=False)
outage_sms_sent = models.BooleanField(default=False)
recovery_email_sent = models.BooleanField(default=False)
recovery_sms_sent = models.BooleanField(default=False)
@property
def is_active(self):
return False if self.recovery_time else True
)
)
def send_outage_email(self):
from core.models import CoreSettings
CORE = CoreSettings.objects.first()
CORE.send_mail(
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue",
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
(
f"Data has not been received from client {self.agent.client.name}, "
f"site {self.agent.site.name}, "
f"agent {self.agent.hostname} "
f"Data has not been received from client {self.client.name}, "
f"site {self.site.name}, "
f"agent {self.hostname} "
"within the expected time."
),
alert_template=self.alert_template,
)
def send_recovery_email(self):
@@ -688,13 +818,14 @@ class AgentOutage(models.Model):
CORE = CoreSettings.objects.first()
CORE.send_mail(
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received",
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
(
f"Data has been received from client {self.agent.client.name}, "
f"site {self.agent.site.name}, "
f"agent {self.agent.hostname} "
f"Data has been received from client {self.client.name}, "
f"site {self.site.name}, "
f"agent {self.hostname} "
"after an interruption in data transmission."
),
alert_template=self.alert_template,
)
def send_outage_sms(self):
@@ -702,7 +833,8 @@ class AgentOutage(models.Model):
CORE = CoreSettings.objects.first()
CORE.send_sms(
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue"
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
alert_template=self.alert_template,
)
def send_recovery_sms(self):
@@ -710,17 +842,17 @@ class AgentOutage(models.Model):
CORE = CoreSettings.objects.first()
CORE.send_sms(
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received"
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
alert_template=self.alert_template,
)
def __str__(self):
return self.agent.hostname
RECOVERY_CHOICES = [
("salt", "Salt"),
("mesh", "Mesh"),
("command", "Command"),
("rpc", "Nats RPC"),
("checkrunner", "Checkrunner"),
]
@@ -737,12 +869,6 @@ class RecoveryAction(models.Model):
def __str__(self):
return f"{self.agent.hostname} - {self.mode}"
def send(self):
ret = {"recovery": self.mode}
if self.mode == "command":
ret["cmd"] = self.command
return ret
class Note(models.Model):
agent = models.ForeignKey(
@@ -762,3 +888,38 @@ class Note(models.Model):
def __str__(self):
return self.agent.hostname
class AgentCustomField(models.Model):
agent = models.ForeignKey(
Agent,
related_name="custom_fields",
on_delete=models.CASCADE,
)
field = models.ForeignKey(
"core.CustomField",
related_name="agent_fields",
on_delete=models.CASCADE,
)
string_value = models.TextField(null=True, blank=True)
bool_value = models.BooleanField(blank=True, default=False)
multiple_value = ArrayField(
models.TextField(null=True, blank=True),
null=True,
blank=True,
default=list,
)
def __str__(self):
return self.field
@property
def value(self):
if self.field.type == "multiple":
return self.multiple_value
elif self.field.type == "checkbox":
return self.bool_value
else:
return self.string_value
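# --- Illustrative sketch (not from the repo): the value property above dispatches
# on the related CustomField.type -- "multiple" reads multiple_value, "checkbox"
# reads bool_value, anything else reads string_value. Standalone version of that
# dispatch:
from typing import Any, List

def custom_field_value(field_type: str, string_value: str, bool_value: bool, multiple_value: List[str]) -> Any:
    if field_type == "multiple":
        return multiple_value
    if field_type == "checkbox":
        return bool_value
    return string_value

assert custom_field_value("checkbox", "", True, []) is True
assert custom_field_value("text", "hello", False, []) == "hello"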


@@ -1,12 +1,10 @@
import pytz
from rest_framework import serializers
from rest_framework.fields import ReadOnlyField
from .models import Agent, Note
from winupdate.serializers import WinUpdatePolicySerializer
from clients.serializers import ClientSerializer
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent, AgentCustomField, Note
class AgentSerializer(serializers.ModelSerializer):
@@ -18,6 +16,7 @@ class AgentSerializer(serializers.ModelSerializer):
local_ips = serializers.ReadOnlyField()
make_model = serializers.ReadOnlyField()
physical_disks = serializers.ReadOnlyField()
graphics = serializers.ReadOnlyField()
checks = serializers.ReadOnlyField()
timezone = serializers.ReadOnlyField()
all_timezones = serializers.SerializerMethodField()
@@ -34,26 +33,69 @@ class AgentSerializer(serializers.ModelSerializer):
]
class AgentOverdueActionSerializer(serializers.ModelSerializer):
class Meta:
model = Agent
fields = [
"pk",
"overdue_email_alert",
"overdue_text_alert",
"overdue_dashboard_alert",
]
class AgentTableSerializer(serializers.ModelSerializer):
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
pending_actions = serializers.SerializerMethodField()
status = serializers.ReadOnlyField()
checks = serializers.ReadOnlyField()
last_seen = serializers.SerializerMethodField()
client_name = serializers.ReadOnlyField(source="client.name")
site_name = serializers.ReadOnlyField(source="site.name")
logged_username = serializers.SerializerMethodField()
italic = serializers.SerializerMethodField()
policy = serializers.ReadOnlyField(source="policy.id")
alert_template = serializers.SerializerMethodField()
def get_last_seen(self, obj):
def get_alert_template(self, obj):
if not obj.alert_template:
return None
else:
return {
"name": obj.alert_template.name,
"always_email": obj.alert_template.agent_always_email,
"always_text": obj.alert_template.agent_always_text,
"always_alert": obj.alert_template.agent_always_alert,
}
def get_pending_actions(self, obj):
return obj.pendingactions.filter(status="pending").count()
def get_last_seen(self, obj) -> str:
if obj.time_zone is not None:
agent_tz = pytz.timezone(obj.time_zone)
else:
agent_tz = self.context["default_tz"]
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M")
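# --- Illustrative sketch (not from the repo): get_last_seen above renders
# last_seen in the agent's reported time zone when one exists, otherwise in the
# dashboard default passed via the serializer context. Standalone version of
# that fallback:
import datetime as dt
from typing import Optional

import pytz

def render_last_seen(last_seen: dt.datetime, agent_tz_name: Optional[str], default_tz) -> str:
    tz = pytz.timezone(agent_tz_name) if agent_tz_name is not None else default_tz
    return last_seen.astimezone(tz).strftime("%m %d %Y %H:%M")

# 07:05 UTC is 00:05 the same day in Los Angeles (PDT)
assert render_last_seen(
    dt.datetime(2021, 4, 30, 7, 5, tzinfo=pytz.UTC), "America/Los_Angeles", pytz.UTC
) == "04 30 2021 00:05"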
def get_logged_username(self, obj) -> str:
if obj.logged_in_username == "None" and obj.status == "online":
return obj.last_logged_in_user
elif obj.logged_in_username != "None":
return obj.logged_in_username
else:
return "-"
def get_italic(self, obj) -> bool:
return obj.logged_in_username == "None" and obj.status == "online"
class Meta:
model = Agent
fields = [
"id",
"alert_template",
"hostname",
"agent_id",
"site_name",
@@ -62,23 +104,47 @@ class AgentTableSerializer(serializers.ModelSerializer):
"description",
"needs_reboot",
"patches_pending",
"pending_actions",
"status",
"overdue_text_alert",
"overdue_email_alert",
"overdue_dashboard_alert",
"last_seen",
"boot_time",
"checks",
"logged_in_username",
"last_logged_in_user",
"maintenance_mode",
"logged_username",
"italic",
"policy",
"block_policy_inheritance",
]
depth = 2
class AgentCustomFieldSerializer(serializers.ModelSerializer):
class Meta:
model = AgentCustomField
fields = (
"id",
"field",
"agent",
"value",
"string_value",
"bool_value",
"multiple_value",
)
extra_kwargs = {
"string_value": {"write_only": True},
"bool_value": {"write_only": True},
"multiple_value": {"write_only": True},
}
class AgentEditSerializer(serializers.ModelSerializer):
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
all_timezones = serializers.SerializerMethodField()
client = ClientSerializer(read_only=True)
custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
def get_all_timezones(self, obj):
return pytz.all_timezones
@@ -96,10 +162,13 @@ class AgentEditSerializer(serializers.ModelSerializer):
"timezone",
"check_interval",
"overdue_time",
"offline_time",
"overdue_text_alert",
"overdue_email_alert",
"all_timezones",
"winupdatepolicy",
"policy",
"custom_fields",
]


@@ -1,302 +1,280 @@
from loguru import logger
from time import sleep
import asyncio
import datetime as dt
import random
import requests
from packaging import version as pyver
import urllib.parse
from time import sleep
from typing import Union
from django.conf import settings
from django.utils import timezone as djangotime
from loguru import logger
from packaging import version as pyver
from agents.models import Agent
from core.models import CodeSignToken, CoreSettings
from logs.models import PendingAction
from scripts.models import Script
from tacticalrmm.celery import app
from agents.models import Agent, AgentOutage
from core.models import CoreSettings
from tacticalrmm.utils import run_nats_api_cmd
logger.configure(**settings.LOG_CONFIG)
OLD_64_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2.exe"
OLD_32_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2-x86.exe"
def agent_update(pk: int, codesigntoken: str = None) -> str:
from agents.utils import get_exegen_url
agent = Agent.objects.get(pk=pk)
if pyver.parse(agent.version) <= pyver.parse("1.3.0"):
return "not supported"
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.hostname}. Skipping agent update."
)
return "noarch"
version = settings.LATEST_AGENT_VER
inno = agent.win_inno_exe
if codesigntoken is not None and pyver.parse(version) >= pyver.parse("1.5.0"):
base_url = get_exegen_url() + "/api/v1/winagents/?"
params = {"version": version, "arch": agent.arch, "token": codesigntoken}
url = base_url + urllib.parse.urlencode(params)
else:
url = agent.winagent_dl
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).delete()
PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
details={
"url": url,
"version": version,
"inno": inno,
},
)
nats_data = {
"func": "agentupdate",
"payload": {
"url": url,
"version": version,
"inno": inno,
},
}
asyncio.run(agent.nats_cmd(nats_data, wait=False))
return "created"
@app.task
def send_agent_update_task(pks, version):
assert isinstance(pks, list)
q = Agent.objects.filter(pk__in=pks)
agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)]
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
def send_agent_update_task(pks: list[int]) -> None:
try:
codesigntoken = CodeSignToken.objects.first().token
except:
codesigntoken = None
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.salt_id}. Skipping."
)
continue
# golang agent only backwards compatible with py agent 0.11.2
# force an upgrade to the latest python agent if version < 0.11.2
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
inno = (
"winagent-v0.11.2.exe"
if agent.arch == "64"
else "winagent-v0.11.2-x86.exe"
)
else:
url = agent.winagent_dl
inno = agent.win_inno_exe
logger.info(
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
)
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
logger.info(f"{agent.salt_id}: {r}")
sleep(10)
agent_update(pk, codesigntoken)
sleep(0.05)
sleep(4)
@app.task
def auto_self_agent_update_task():
def auto_self_agent_update_task() -> None:
core = CoreSettings.objects.first()
if not core.agent_auto_update:
logger.info("Agent auto update is disabled. Skipping.")
return
try:
codesigntoken = CodeSignToken.objects.first().token
except:
codesigntoken = None
q = Agent.objects.only("pk", "version")
agents = [
pks: list[int] = [
i.pk
for i in q
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
]
logger.info(f"Updating {len(agents)}")
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.salt_id}. Skipping."
)
continue
# golang agent only backwards compatible with py agent 0.11.2
# force an upgrade to the latest python agent if version < 0.11.2
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
inno = (
"winagent-v0.11.2.exe"
if agent.arch == "64"
else "winagent-v0.11.2-x86.exe"
)
else:
url = agent.winagent_dl
inno = agent.win_inno_exe
logger.info(
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
)
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
logger.info(f"{agent.salt_id}: {r}")
sleep(10)
agent_update(pk, codesigntoken)
sleep(0.05)
sleep(4)
@app.task
def update_salt_minion_task():
q = Agent.objects.all()
agents = [
i.pk
for i in q
if pyver.parse(i.version) >= pyver.parse("0.11.0")
and pyver.parse(i.salt_ver) < pyver.parse(settings.LATEST_SALT_VER)
]
def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
alert = Alert.objects.get(pk=pk)
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_async(func="win_agent.update_salt")
sleep(20)
@app.task
def get_wmi_detail_task(pk):
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")
return "ok"
@app.task
def sync_salt_modules_task(pk):
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
# successful sync if new/changed files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
# successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
if r == "timeout" or r == "error":
return f"Unable to sync modules {agent.salt_id}"
return f"Successfully synced salt modules on {agent.hostname}"
@app.task
def batch_sync_modules_task():
# sync modules, split into chunks of 50 agents to not overload salt
agents = Agent.objects.all()
online = [i.salt_id for i in agents if i.status == "online"]
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
for chunk in chunks:
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
sleep(10)
@app.task
def batch_sysinfo_task():
# update system info using WMI
agents = Agent.objects.all()
online = [
i.salt_id
for i in agents
if not i.not_supported("0.11.0") and i.status == "online"
]
chunks = (online[i : i + 30] for i in range(0, len(online), 30))
for chunk in chunks:
Agent.salt_batch_async(minions=chunk, func="win_agent.local_sys_info")
sleep(10)
@app.task
def uninstall_agent_task(salt_id):
attempts = 0
error = False
while 1:
try:
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "local",
"tgt": salt_id,
"fun": "win_agent.uninstall_agent",
"timeout": 8,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=10,
)
ret = r.json()["return"][0][salt_id]
except Exception:
attempts += 1
else:
if ret != "ok":
attempts += 1
else:
attempts = 0
if attempts >= 10:
error = True
break
elif attempts == 0:
break
if error:
logger.error(f"{salt_id} uninstall failed")
if not alert.email_sent:
sleep(random.randint(1, 15))
alert.agent.send_outage_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
else:
logger.info(f"{salt_id} was successfully uninstalled")
try:
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "wheel",
"fun": "key.delete",
"match": salt_id,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=30,
)
except Exception:
logger.error(f"{salt_id} unable to remove salt-key")
if alert_interval:
# send an email only if the last email sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.email_sent < delta:
sleep(random.randint(1, 10))
alert.agent.send_outage_email()
alert.email_sent = djangotime.now()
alert.save(update_fields=["email_sent"])
return "ok"
@app.task
def agent_outage_email_task(pk):
def agent_recovery_email_task(pk: int) -> str:
from alerts.models import Alert
sleep(random.randint(1, 15))
outage = AgentOutage.objects.get(pk=pk)
outage.send_outage_email()
outage.outage_email_sent = True
outage.save(update_fields=["outage_email_sent"])
alert = Alert.objects.get(pk=pk)
alert.agent.send_recovery_email()
alert.resolved_email_sent = djangotime.now()
alert.save(update_fields=["resolved_email_sent"])
return "ok"
@app.task
def agent_recovery_email_task(pk):
sleep(random.randint(1, 15))
outage = AgentOutage.objects.get(pk=pk)
outage.send_recovery_email()
outage.recovery_email_sent = True
outage.save(update_fields=["recovery_email_sent"])
def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str:
from alerts.models import Alert
alert = Alert.objects.get(pk=pk)
if not alert.sms_sent:
sleep(random.randint(1, 15))
alert.agent.send_outage_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
else:
if alert_interval:
# send an sms only if the last sms sent is older than alert interval
delta = djangotime.now() - dt.timedelta(days=alert_interval)
if alert.sms_sent < delta:
sleep(random.randint(1, 10))
alert.agent.send_outage_sms()
alert.sms_sent = djangotime.now()
alert.save(update_fields=["sms_sent"])
return "ok"
@app.task
def agent_outage_sms_task(pk):
def agent_recovery_sms_task(pk: int) -> str:
from alerts.models import Alert
sleep(random.randint(1, 3))
outage = AgentOutage.objects.get(pk=pk)
outage.send_outage_sms()
outage.outage_sms_sent = True
outage.save(update_fields=["outage_sms_sent"])
alert = Alert.objects.get(pk=pk)
alert.agent.send_recovery_sms()
alert.resolved_sms_sent = djangotime.now()
alert.save(update_fields=["resolved_sms_sent"])
return "ok"
@app.task
def agent_recovery_sms_task(pk):
sleep(random.randint(1, 3))
outage = AgentOutage.objects.get(pk=pk)
outage.send_recovery_sms()
outage.recovery_sms_sent = True
outage.save(update_fields=["recovery_sms_sent"])
def agent_outages_task() -> None:
from alerts.models import Alert
@app.task
def agent_outages_task():
agents = Agent.objects.only("pk")
agents = Agent.objects.only(
"pk",
"last_seen",
"offline_time",
"overdue_time",
"overdue_email_alert",
"overdue_text_alert",
"overdue_dashboard_alert",
)
for agent in agents:
if agent.status == "overdue":
outages = AgentOutage.objects.filter(agent=agent)
if outages and outages.last().is_active:
continue
Alert.handle_alert_failure(agent)
outage = AgentOutage(agent=agent)
outage.save()
if agent.overdue_email_alert and not agent.maintenance_mode:
agent_outage_email_task.delay(pk=outage.pk)
@app.task
def run_script_email_results_task(
agentpk: int,
scriptpk: int,
nats_timeout: int,
emails: list[str],
args: list[str] = [],
):
agent = Agent.objects.get(pk=agentpk)
script = Script.objects.get(pk=scriptpk)
r = agent.run_script(
scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
)
if r == "timeout":
logger.error(f"{agent.hostname} timed out running script.")
return
if agent.overdue_text_alert and not agent.maintenance_mode:
agent_outage_sms_task.delay(pk=outage.pk)
CORE = CoreSettings.objects.first()
subject = f"{agent.hostname} {script.name} Results"
exec_time = "{:.4f}".format(r["execution_time"])
body = (
subject
+ f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}"
)
import smtplib
from email.message import EmailMessage
msg = EmailMessage()
msg["Subject"] = subject
msg["From"] = CORE.smtp_from_email
if emails:
msg["To"] = ", ".join(emails)
else:
msg["To"] = ", ".join(CORE.email_alert_recipients)
msg.set_content(body)
try:
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
if CORE.smtp_requires_auth:
server.ehlo()
server.starttls()
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
server.send_message(msg)
server.quit()
else:
server.send_message(msg)
server.quit()
except Exception as e:
logger.error(e)
@app.task
def monitor_agents_task() -> None:
agents = Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
ids = [i.agent_id for i in agents if i.status != "online"]
run_nats_api_cmd("monitor", ids)
@app.task
def get_wmi_task() -> None:
agents = Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
ids = [i.agent_id for i in agents if i.status == "online"]
run_nats_api_cmd("wmi", ids)

File diff suppressed because it is too large


@@ -1,18 +1,16 @@
from django.urls import path
from . import views
urlpatterns = [
path("listagents/", views.AgentsTableList.as_view()),
path("listagentsnodetail/", views.list_agents_no_detail),
path("<int:pk>/agenteditdetails/", views.agent_edit_details),
path("byclient/<int:clientpk>/", views.by_client),
path("bysite/<int:sitepk>/", views.by_site),
path("overdueaction/", views.overdue_action),
path("sendrawcmd/", views.send_raw_cmd),
path("<pk>/agentdetail/", views.agent_detail),
path("<int:pk>/meshcentral/", views.meshcentral),
path("<str:arch>/getmeshexe/", views.get_mesh_exe),
path("poweraction/", views.power_action),
path("uninstall/", views.uninstall),
path("editagent/", views.edit_agent),
path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
@@ -20,16 +18,15 @@ urlpatterns = [
path("updateagents/", views.update_agents),
path("<pk>/getprocs/", views.get_processes),
path("<pk>/<pid>/killproc/", views.kill_proc),
path("rebootlater/", views.reboot_later),
path("reboot/", views.Reboot.as_view()),
path("installagent/", views.install_agent),
path("<int:pk>/ping/", views.ping),
path("recover/", views.recover),
path("runscript/", views.run_script),
path("<int:pk>/restartmesh/", views.restart_mesh),
path("<int:pk>/recovermesh/", views.recover_mesh),
path("<int:pk>/notes/", views.GetAddNotes.as_view()),
path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
path("bulk/", views.bulk),
path("agent_counts/", views.agent_counts),
path("maintenance/", views.agent_maintenance),
path("<int:pk>/wmi/", views.WMI.as_view()),
]


@@ -0,0 +1,37 @@
import random
import urllib.parse
import requests
from django.conf import settings
def get_exegen_url() -> str:
urls: list[str] = settings.EXE_GEN_URLS
for url in urls:
try:
r = requests.get(url, timeout=10)
except:
continue
if r.status_code == 200:
return url
return random.choice(urls)
def get_winagent_url(arch: str) -> str:
from core.models import CodeSignToken
try:
codetoken = CodeSignToken.objects.first().token
base_url = get_exegen_url() + "/api/v1/winagents/?"
params = {
"version": settings.LATEST_AGENT_VER,
"arch": arch,
"token": codetoken,
}
dl_url = base_url + urllib.parse.urlencode(params)
except:
dl_url = settings.DL_64 if arch == "64" else settings.DL_32
return dl_url
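# --- Editor's usage sketch: get_winagent_url above returns a code-signed download
# URL built from the first reachable exe-generation host when a CodeSignToken
# exists, and falls back to the stock DL_64 / DL_32 links otherwise. Assumes the
# tacticalrmm Django environment is configured before importing:
from agents.utils import get_winagent_url

def installer_url(arch: str) -> str:
    # arch is "64" or "32", matching the callers in views.py below
    return get_winagent_url(arch)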


@@ -1,52 +1,46 @@
from loguru import logger
import os
import subprocess
import zlib
import json
import base64
import pytz
import asyncio
import datetime as dt
from packaging import version as pyver
import os
import random
import string
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from loguru import logger
from packaging import version as pyver
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status, generics
from rest_framework.views import APIView
from .models import Agent, AgentOutage, RecoveryAction, Note
from winupdate.models import WinUpdatePolicy
from clients.models import Client, Site
from accounts.models import User
from core.models import CoreSettings
from logs.models import AuditLog, PendingAction
from scripts.models import Script
from logs.models import AuditLog
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
from winupdate.serializers import WinUpdatePolicySerializer
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from .models import Agent, AgentCustomField, Note, RecoveryAction
from .serializers import (
AgentSerializer,
AgentHostnameSerializer,
AgentTableSerializer,
AgentCustomFieldSerializer,
AgentEditSerializer,
AgentHostnameSerializer,
AgentOverdueActionSerializer,
AgentSerializer,
AgentTableSerializer,
NoteSerializer,
NotesSerializer,
)
from winupdate.serializers import WinUpdatePolicySerializer
from .tasks import uninstall_agent_task, send_agent_update_task
from winupdate.tasks import bulk_check_for_updates_task
from scripts.tasks import run_script_bg_task, run_bulk_script_task
from tacticalrmm.utils import notify_error
from .tasks import run_script_email_results_task, send_agent_update_task
logger.configure(**settings.LOG_CONFIG)
@api_view()
def get_agent_versions(request):
agents = Agent.objects.only("pk")
agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
return Response(
{
"versions": [settings.LATEST_AGENT_VER],
@@ -57,51 +51,76 @@ def get_agent_versions(request):
@api_view(["POST"])
def update_agents(request):
pks = request.data["pks"]
version = request.data["version"]
send_agent_update_task.delay(pks=pks, version=version)
q = Agent.objects.filter(pk__in=request.data["pks"]).only("pk", "version")
pks: list[int] = [
i.pk
for i in q
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
]
send_agent_update_task.delay(pks=pks)
return Response("ok")
@api_view()
def ping(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=5, func="test.ping")
status = "offline"
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
if r == "pong":
status = "online"
if r == "timeout" or r == "error":
return Response({"name": agent.hostname, "status": "offline"})
if isinstance(r, bool) and r:
return Response({"name": agent.hostname, "status": "online"})
else:
return Response({"name": agent.hostname, "status": "offline"})
return Response({"name": agent.hostname, "status": status})
@api_view(["DELETE"])
def uninstall(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
salt_id = agent.salt_id
name = agent.hostname
agent.delete()
uninstall_agent_task.delay(salt_id)
reload_nats()
return Response(f"{name} will now be uninstalled.")
@api_view(["PATCH"])
@api_view(["PATCH", "PUT"])
def edit_agent(request):
agent = get_object_or_404(Agent, pk=request.data["id"])
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
a_serializer.is_valid(raise_exception=True)
a_serializer.save()
policy = agent.winupdatepolicy.get()
p_serializer = WinUpdatePolicySerializer(
instance=policy, data=request.data["winupdatepolicy"][0]
)
p_serializer.is_valid(raise_exception=True)
p_serializer.save()
if "winupdatepolicy" in request.data.keys():
policy = agent.winupdatepolicy.get() # type: ignore
p_serializer = WinUpdatePolicySerializer(
instance=policy, data=request.data["winupdatepolicy"][0]
)
p_serializer.is_valid(raise_exception=True)
p_serializer.save()
if "custom_fields" in request.data.keys():
for field in request.data["custom_fields"]:
custom_field = field
custom_field["agent"] = agent.id # type: ignore
if AgentCustomField.objects.filter(
field=field["field"], agent=agent.id # type: ignore
):
value = AgentCustomField.objects.get(
field=field["field"], agent=agent.id # type: ignore
)
serializer = AgentCustomFieldSerializer(
instance=value, data=custom_field
)
serializer.is_valid(raise_exception=True)
serializer.save()
else:
serializer = AgentCustomFieldSerializer(data=custom_field)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("ok")
@@ -118,16 +137,9 @@ def meshcentral(request, pk):
if token == "err":
return notify_error("Invalid mesh token")
control = (
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=11&hide=31"
)
terminal = (
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=12&hide=31"
)
file = (
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=13&hide=31"
)
webrdp = f"{core.mesh_site}/mstsc.html?login={token}&node={agent.mesh_node_id}"
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
@@ -136,7 +148,6 @@ def meshcentral(request, pk):
"control": control,
"terminal": terminal,
"file": file,
"webrdp": webrdp,
"status": agent.status,
"client": agent.client.name,
"site": agent.site.name,
@@ -153,28 +164,23 @@ def agent_detail(request, pk):
@api_view()
def get_processes(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=20, func="win_agent.get_procs")
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error":
return notify_error("Something went wrong")
return Response(r)
@api_view()
def kill_proc(request, pk, pid):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=25, func="ps.kill_pid", arg=int(pid))
r = asyncio.run(
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
)
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error":
return notify_error("Something went wrong")
if isinstance(r, bool) and not r:
return notify_error("Unable to kill the process")
elif r != "ok":
return notify_error(r)
return Response("ok")
@@ -182,55 +188,38 @@ def kill_proc(request, pk, pid):
@api_view()
def get_event_log(request, pk, logtype, days):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(
timeout=30,
func="win_agent.get_eventlog",
arg=[logtype, int(days)],
)
if r == "timeout" or r == "error":
timeout = 180 if logtype == "Security" else 30
data = {
"func": "eventlog",
"timeout": timeout,
"payload": {
"logname": logtype,
"days": str(days),
},
}
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
if r == "timeout":
return notify_error("Unable to contact the agent")
return Response(json.loads(zlib.decompress(base64.b64decode(r["wineventlog"]))))
@api_view(["POST"])
def power_action(request):
pk = request.data["pk"]
action = request.data["action"]
agent = get_object_or_404(Agent, pk=pk)
if action == "rebootnow":
logger.info(f"{agent.hostname} was scheduled for immediate reboot")
r = agent.salt_api_cmd(
timeout=30,
func="system.reboot",
arg=3,
kwargs={"in_seconds": True},
)
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
return notify_error("Unable to contact the agent")
return Response("ok")
return Response(r)
@api_view(["POST"])
def send_raw_cmd(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
r = agent.salt_api_cmd(
timeout=request.data["timeout"],
func="cmd.run",
kwargs={
"cmd": request.data["cmd"],
timeout = int(request.data["timeout"])
data = {
"func": "rawcmd",
"timeout": timeout,
"payload": {
"command": request.data["cmd"],
"shell": request.data["shell"],
"timeout": request.data["timeout"],
},
)
}
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error" or not r:
return notify_error("Something went wrong")
AuditLog.audit_raw_command(
username=request.user.username,
@@ -239,25 +228,42 @@ def send_raw_cmd(request):
shell=request.data["shell"],
)
logger.info(f"The command {request.data['cmd']} was sent on agent {agent.hostname}")
return Response(r)
class AgentsTableList(generics.ListAPIView):
queryset = (
Agent.objects.select_related("site")
.prefetch_related("agentchecks")
.only(
class AgentsTableList(APIView):
def patch(self, request):
if "sitePK" in request.data.keys():
queryset = (
Agent.objects.select_related("site", "policy", "alert_template")
.prefetch_related("agentchecks")
.filter(site_id=request.data["sitePK"])
)
elif "clientPK" in request.data.keys():
queryset = (
Agent.objects.select_related("site", "policy", "alert_template")
.prefetch_related("agentchecks")
.filter(site__client_id=request.data["clientPK"])
)
else:
queryset = Agent.objects.select_related(
"site", "policy", "alert_template"
).prefetch_related("agentchecks")
queryset = queryset.only(
"pk",
"hostname",
"agent_id",
"site",
"policy",
"alert_template",
"monitoring_type",
"description",
"needs_reboot",
"overdue_text_alert",
"overdue_email_alert",
"overdue_time",
"offline_time",
"last_seen",
"boot_time",
"logged_in_username",
@@ -265,14 +271,7 @@ class AgentsTableList(generics.ListAPIView):
"time_zone",
"maintenance_mode",
)
)
serializer_class = AgentTableSerializer
def list(self, request):
queryset = self.get_queryset()
ctx = {
"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)
}
ctx = {"default_tz": get_default_timezone()}
serializer = AgentTableSerializer(queryset, many=True, context=ctx)
return Response(serializer.data)
@@ -289,113 +288,75 @@ def agent_edit_details(request, pk):
return Response(AgentEditSerializer(agent).data)
@api_view()
def by_client(request, clientpk):
agents = (
Agent.objects.select_related("site")
.filter(site__client_id=clientpk)
.prefetch_related("agentchecks")
.only(
"pk",
"hostname",
"agent_id",
"site",
"monitoring_type",
"description",
"needs_reboot",
"overdue_text_alert",
"overdue_email_alert",
"overdue_time",
"last_seen",
"boot_time",
"logged_in_username",
"last_logged_in_user",
"time_zone",
"maintenance_mode",
)
)
ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
@api_view()
def by_site(request, sitepk):
agents = (
Agent.objects.filter(site_id=sitepk)
.select_related("site")
.prefetch_related("agentchecks")
.only(
"pk",
"hostname",
"agent_id",
"site",
"monitoring_type",
"description",
"needs_reboot",
"overdue_text_alert",
"overdue_email_alert",
"overdue_time",
"last_seen",
"boot_time",
"logged_in_username",
"last_logged_in_user",
"time_zone",
"maintenance_mode",
)
)
ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
@api_view(["POST"])
def overdue_action(request):
pk = request.data["pk"]
alert_type = request.data["alertType"]
action = request.data["action"]
agent = get_object_or_404(Agent, pk=pk)
if alert_type == "email" and action == "enabled":
agent.overdue_email_alert = True
agent.save(update_fields=["overdue_email_alert"])
elif alert_type == "email" and action == "disabled":
agent.overdue_email_alert = False
agent.save(update_fields=["overdue_email_alert"])
elif alert_type == "text" and action == "enabled":
agent.overdue_text_alert = True
agent.save(update_fields=["overdue_text_alert"])
elif alert_type == "text" and action == "disabled":
agent.overdue_text_alert = False
agent.save(update_fields=["overdue_text_alert"])
else:
return Response(
{"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
)
agent = get_object_or_404(Agent, pk=request.data["pk"])
serializer = AgentOverdueActionSerializer(
instance=agent, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(agent.hostname)
@api_view(["POST"])
def reboot_later(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
date_time = request.data["datetime"]
class Reboot(APIView):
# reboot now
def post(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
if r != "ok":
return notify_error("Unable to contact the agent")
try:
obj = dt.datetime.strptime(date_time, "%Y-%m-%d %H:%M")
except Exception:
return notify_error("Invalid date")
return Response("ok")
r = agent.schedule_reboot(obj)
# reboot later
def patch(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "failed":
return notify_error("Something went wrong")
try:
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
except Exception:
return notify_error("Invalid date")
return Response(r["msg"])
task_name = "TacticalRMM_SchedReboot_" + "".join(
random.choice(string.ascii_letters) for _ in range(10)
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "schedreboot",
"deleteafter": True,
"trigger": "once",
"name": task_name,
"year": int(dt.datetime.strftime(obj, "%Y")),
"month": dt.datetime.strftime(obj, "%B"),
"day": int(dt.datetime.strftime(obj, "%d")),
"hour": int(dt.datetime.strftime(obj, "%H")),
"min": int(dt.datetime.strftime(obj, "%M")),
},
}
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if r != "ok":
return notify_error(r)
details = {"taskname": task_name, "time": str(obj)}
PendingAction.objects.create(
agent=agent, action_type="schedreboot", details=details
)
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
return Response(
{"time": nice_time, "agent": agent.hostname, "task_name": task_name}
)
@api_view(["POST"])
def install_agent(request):
from knox.models import AuthToken
from agents.utils import get_winagent_url
client_id = request.data["client"]
site_id = request.data["site"]
version = settings.LATEST_AGENT_VER
@@ -416,131 +377,27 @@ def install_agent(request):
inno = (
f"winagent-v{version}.exe" if arch == "64" else f"winagent-v{version}-x86.exe"
)
download_url = settings.DL_64 if arch == "64" else settings.DL_32
download_url = get_winagent_url(arch)
_, token = AuthToken.objects.create(
user=request.user, expiry=dt.timedelta(hours=request.data["expires"])
)
if request.data["installMethod"] == "exe":
go_bin = "/usr/local/rmmgo/go/bin/go"
from tacticalrmm.utils import generate_winagent_exe
if not os.path.exists(go_bin):
return Response("nogolang", status=status.HTTP_409_CONFLICT)
api = request.data["api"]
atype = request.data["agenttype"]
rdp = request.data["rdp"]
ping = request.data["ping"]
power = request.data["power"]
file_name = "rmm-installer.exe"
exe = os.path.join(settings.EXE_DIR, file_name)
if os.path.exists(exe):
try:
os.remove(exe)
except Exception as e:
logger.error(str(e))
goarch = "amd64" if arch == "64" else "386"
cmd = [
"env",
"GOOS=windows",
f"GOARCH={goarch}",
go_bin,
"build",
f"-ldflags=\"-X 'main.Inno={inno}'",
f"-X 'main.Api={api}'",
f"-X 'main.Client={client_id}'",
f"-X 'main.Site={site_id}'",
f"-X 'main.Atype={atype}'",
f"-X 'main.Rdp={rdp}'",
f"-X 'main.Ping={ping}'",
f"-X 'main.Power={power}'",
f"-X 'main.DownloadUrl={download_url}'",
f"-X 'main.Token={token}'\"",
"-o",
exe,
]
build_error = False
gen_error = False
gen = [
"env",
"GOOS=windows",
f"GOARCH={goarch}",
go_bin,
"generate",
]
try:
r1 = subprocess.run(
" ".join(gen),
capture_output=True,
shell=True,
cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
)
except Exception as e:
gen_error = True
logger.error(str(e))
return Response(
"genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
)
if r1.returncode != 0:
gen_error = True
if r1.stdout:
logger.error(r1.stdout.decode("utf-8", errors="ignore"))
if r1.stderr:
logger.error(r1.stderr.decode("utf-8", errors="ignore"))
logger.error(f"Go build failed with return code {r1.returncode}")
if gen_error:
return Response(
"genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE
)
try:
r = subprocess.run(
" ".join(cmd),
capture_output=True,
shell=True,
cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"),
)
except Exception as e:
build_error = True
logger.error(str(e))
return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)
if r.returncode != 0:
build_error = True
if r.stdout:
logger.error(r.stdout.decode("utf-8", errors="ignore"))
if r.stderr:
logger.error(r.stderr.decode("utf-8", errors="ignore"))
logger.error(f"Go build failed with return code {r.returncode}")
if build_error:
return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED)
if settings.DEBUG:
with open(exe, "rb") as f:
response = HttpResponse(
f.read(),
content_type="application/vnd.microsoft.portable-executable",
)
response["Content-Disposition"] = f"inline; filename={file_name}"
return response
else:
response = HttpResponse()
response["Content-Disposition"] = f"attachment; filename={file_name}"
response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
return response
return generate_winagent_exe(
client=client_id,
site=site_id,
agent_type=request.data["agenttype"],
rdp=request.data["rdp"],
ping=request.data["ping"],
power=request.data["power"],
arch=arch,
token=token,
api=request.data["api"],
file_name=request.data["fileName"],
)
elif request.data["installMethod"] == "manual":
cmd = [
@@ -548,12 +405,10 @@ def install_agent(request):
"/VERYSILENT",
"/SUPPRESSMSGBOXES",
"&&",
"timeout",
"/t",
"20",
"/nobreak",
">",
"NUL",
"ping",
"127.0.0.1",
"-n",
"5",
"&&",
r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"',
"-m",
@@ -580,8 +435,6 @@ def install_agent(request):
resp = {
"cmd": " ".join(str(i) for i in cmd),
"url": download_url,
"salt64": settings.SALT_64,
"salt32": settings.SALT_32,
}
return Response(resp)
@@ -636,35 +489,46 @@ def install_agent(request):
@api_view(["POST"])
def recover(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
mode = request.data["mode"]
if pyver.parse(agent.version) <= pyver.parse("0.9.5"):
return notify_error("Only available in agent version greater than 0.9.5")
# attempt a realtime recovery, otherwise fall back to old recovery method
if mode == "tacagent" or mode == "mesh":
data = {"func": "recover", "payload": {"mode": mode}}
r = asyncio.run(agent.nats_cmd(data, timeout=10))
if r == "ok":
return Response("Successfully completed recovery")
if agent.recoveryactions.filter(last_run=None).exists():
if agent.recoveryactions.filter(last_run=None).exists(): # type: ignore
return notify_error(
"A recovery action is currently pending. Please wait for the next agent check-in."
)
if request.data["mode"] == "command" and not request.data["cmd"]:
if mode == "command" and not request.data["cmd"]:
return notify_error("Command is required")
# if we've made it this far and realtime recovery didn't work,
# tacagent service is the fallback recovery, so obviously we can't use it to recover itself if it's down
if mode == "tacagent":
return notify_error(
"Requires RPC service to be functional. Please recover that first"
)
# we should only get here if all other methods fail
RecoveryAction(
agent=agent,
mode=request.data["mode"],
command=request.data["cmd"] if request.data["mode"] == "command" else None,
mode=mode,
command=request.data["cmd"] if mode == "command" else None,
).save()
return Response(f"Recovery will be attempted on the agent's next check-in")
return Response("Recovery will be attempted on the agent's next check-in")
@api_view(["POST"])
def run_script(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
script = get_object_or_404(Script, pk=request.data["scriptPK"])
output = request.data["output"]
args = request.data["args"]
req_timeout = int(request.data["timeout"]) + 3
AuditLog.audit_script_run(
@@ -674,74 +538,34 @@ def run_script(request):
)
if output == "wait":
r = agent.salt_api_cmd(
timeout=req_timeout,
func="win_agent.run_script",
kwargs={
"filepath": script.filepath,
"filename": script.filename,
"shell": script.shell,
"timeout": request.data["timeout"],
"args": args,
},
r = agent.run_script(
scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
)
return Response(r)
if isinstance(r, dict):
if r["stdout"]:
return Response(r["stdout"])
elif r["stderr"]:
return Response(r["stderr"])
else:
try:
r["retcode"]
except KeyError:
return notify_error("Something went wrong")
return Response(f"Return code: {r['retcode']}")
else:
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error":
return notify_error("Something went wrong")
else:
return notify_error(str(r))
elif output == "email":
emails = (
[] if request.data["emailmode"] == "default" else request.data["emails"]
)
run_script_email_results_task.delay(
agentpk=agent.pk,
scriptpk=script.pk,
nats_timeout=req_timeout,
emails=emails,
args=args,
)
else:
data = {
"agentpk": agent.pk,
"scriptpk": script.pk,
"timeout": request.data["timeout"],
"args": args,
}
run_script_bg_task.delay(data)
return Response(f"{script.name} will now be run on {agent.hostname}")
agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)
@api_view()
def restart_mesh(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(func="service.restart", arg="mesh agent", timeout=30)
if r == "timeout" or r == "error":
return notify_error("Unable to contact the agent")
elif isinstance(r, bool) and r:
return Response(f"Restarted Mesh Agent on {agent.hostname}")
else:
return notify_error(f"Failed to restart the Mesh Agent on {agent.hostname}")
return Response(f"{script.name} will now be run on {agent.hostname}")
@api_view()
def recover_mesh(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(
timeout=60,
func="cmd.run",
kwargs={
"cmd": r'"C:\\Program Files\\TacticalAgent\\tacticalrmm.exe" -m recovermesh',
"timeout": 55,
},
)
if r == "timeout" or r == "error":
data = {"func": "recover", "payload": {"mode": "mesh"}}
r = asyncio.run(agent.nats_cmd(data, timeout=45))
if r != "ok":
return notify_error("Unable to contact the agent")
return Response(f"Repaired mesh agent on {agent.hostname}")
@@ -805,97 +629,50 @@ def bulk(request):
return notify_error("Must select at least 1 agent")
if request.data["target"] == "client":
agents = Agent.objects.filter(site__client_id=request.data["client"])
q = Agent.objects.filter(site__client_id=request.data["client"])
elif request.data["target"] == "site":
agents = Agent.objects.filter(site_id=request.data["site"])
q = Agent.objects.filter(site_id=request.data["site"])
elif request.data["target"] == "agents":
agents = Agent.objects.filter(pk__in=request.data["agentPKs"])
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
elif request.data["target"] == "all":
agents = Agent.objects.all()
q = Agent.objects.only("pk", "monitoring_type")
else:
return notify_error("Something went wrong")
minions = [agent.salt_id for agent in agents]
if request.data["monType"] == "servers":
q = q.filter(monitoring_type="server")
elif request.data["monType"] == "workstations":
q = q.filter(monitoring_type="workstation")
agents: list[int] = [agent.pk for agent in q]
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
if request.data["mode"] == "command":
r = Agent.salt_batch_async(
minions=minions,
func="cmd.run_bg",
kwargs={
"cmd": request.data["cmd"],
"shell": request.data["shell"],
"timeout": request.data["timeout"],
},
handle_bulk_command_task.delay(
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
)
if r == "timeout":
return notify_error("Salt API not running")
return Response(f"Command will now be run on {len(minions)} agents")
return Response(f"Command will now be run on {len(agents)} agents")
elif request.data["mode"] == "script":
script = get_object_or_404(Script, pk=request.data["scriptPK"])
if script.shell == "python":
r = Agent.salt_batch_async(
minions=minions,
func="win_agent.run_script",
kwargs={
"filepath": script.filepath,
"filename": script.filename,
"shell": script.shell,
"timeout": request.data["timeout"],
"args": request.data["args"],
"bg": True,
},
)
if r == "timeout":
return notify_error("Salt API not running")
else:
data = {
"minions": minions,
"scriptpk": script.pk,
"timeout": request.data["timeout"],
"args": request.data["args"],
}
run_bulk_script_task.delay(data)
return Response(f"{script.name} will now be run on {len(minions)} agents")
handle_bulk_script_task.delay(
script.pk, agents, request.data["args"], request.data["timeout"]
)
return Response(f"{script.name} will now be run on {len(agents)} agents")
elif request.data["mode"] == "install":
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
if r == "timeout":
return notify_error("Salt API not running")
bulk_install_updates_task.delay(agents)
return Response(
f"Pending updates will now be installed on {len(minions)} agents"
f"Pending updates will now be installed on {len(agents)} agents"
)
elif request.data["mode"] == "scan":
bulk_check_for_updates_task.delay(minions=minions)
return Response(f"Patch status scan will now run on {len(minions)} agents")
bulk_check_for_updates_task.delay(agents)
return Response(f"Patch status scan will now run on {len(agents)} agents")
return notify_error("Something went wrong")
@api_view(["POST"])
def agent_counts(request):
return Response(
{
"total_server_count": Agent.objects.filter(
monitoring_type="server"
).count(),
"total_server_offline_count": AgentOutage.objects.filter(
recovery_time=None, agent__monitoring_type="server"
).count(),
"total_workstation_count": Agent.objects.filter(
monitoring_type="workstation"
).count(),
"total_workstation_offline_count": AgentOutage.objects.filter(
recovery_time=None, agent__monitoring_type="workstation"
).count(),
}
)
@api_view(["POST"])
def agent_maintenance(request):
if request.data["type"] == "Client":
@@ -917,3 +694,12 @@ def agent_maintenance(request):
return notify_error("Invalid data")
return Response("ok")
class WMI(APIView):
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
if r != "ok":
return notify_error("Unable to contact the agent")
return Response("ok")

View File

@@ -1,6 +1,6 @@
from django.contrib import admin
from .models import Alert
from .models import Alert, AlertTemplate
admin.site.register(Alert)
admin.site.register(AlertTemplate)

View File

@@ -1,7 +1,7 @@
# Generated by Django 3.1 on 2020-08-15 15:31
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
@@ -42,4 +42,4 @@ class Migration(migrations.Migration):
),
],
),
]
]

View File

@@ -27,4 +27,4 @@ class Migration(migrations.Migration):
max_length=100,
),
),
]
]

View File

@@ -1,25 +1,31 @@
# Generated by Django 3.1.2 on 2020-10-21 18:15
from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('checks', '0010_auto_20200922_1344'),
('alerts', '0002_auto_20200815_1618'),
("checks", "0010_auto_20200922_1344"),
("alerts", "0002_auto_20200815_1618"),
]
operations = [
migrations.AddField(
model_name='alert',
name='assigned_check',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'),
model_name="alert",
name="assigned_check",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="alert",
to="checks.check",
),
),
migrations.AlterField(
model_name='alert',
name='alert_time',
model_name="alert",
name="alert_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
]

View File

@@ -0,0 +1,172 @@
# Generated by Django 3.1.4 on 2021-02-12 14:08
import django.contrib.postgres.fields
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0029_delete_agentoutage'),
('clients', '0008_auto_20201103_1430'),
('autotasks', '0017_auto_20210210_1512'),
('scripts', '0005_auto_20201207_1606'),
('alerts', '0003_auto_20201021_1815'),
]
operations = [
migrations.AddField(
model_name='alert',
name='action_execution_time',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='alert',
name='action_retcode',
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='action_run',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='action_stderr',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='action_stdout',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='action_timeout',
field=models.PositiveIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='alert_type',
field=models.CharField(choices=[('availability', 'Availability'), ('check', 'Check'), ('task', 'Task'), ('custom', 'Custom')], default='availability', max_length=20),
),
migrations.AddField(
model_name='alert',
name='assigned_task',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='autotasks.automatedtask'),
),
migrations.AddField(
model_name='alert',
name='email_sent',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='hidden',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='alert',
name='resolved_action_execution_time',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_action_retcode',
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_action_run',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_action_stderr',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_action_stdout',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_action_timeout',
field=models.PositiveIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_email_sent',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_on',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='resolved_sms_sent',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='sms_sent',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='alert',
name='snoozed',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='alert',
name='severity',
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30),
),
migrations.CreateModel(
name='AlertTemplate',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('is_active', models.BooleanField(default=True)),
('action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
('resolved_action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
('email_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
('text_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
('email_from', models.EmailField(blank=True, max_length=254, null=True)),
('agent_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
('agent_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
('agent_include_desktops', models.BooleanField(blank=True, default=False, null=True)),
('agent_always_email', models.BooleanField(blank=True, default=False, null=True)),
('agent_always_text', models.BooleanField(blank=True, default=False, null=True)),
('agent_always_alert', models.BooleanField(blank=True, default=False, null=True)),
('agent_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
('check_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
('check_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
('check_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
('check_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
('check_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
('check_always_email', models.BooleanField(blank=True, default=False, null=True)),
('check_always_text', models.BooleanField(blank=True, default=False, null=True)),
('check_always_alert', models.BooleanField(blank=True, default=False, null=True)),
('check_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
('task_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
('task_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
('task_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
('task_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
('task_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
('task_always_email', models.BooleanField(blank=True, default=False, null=True)),
('task_always_text', models.BooleanField(blank=True, default=False, null=True)),
('task_always_alert', models.BooleanField(blank=True, default=False, null=True)),
('task_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
('action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alert_template', to='scripts.script')),
('excluded_agents', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='agents.Agent')),
('excluded_clients', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Client')),
('excluded_sites', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Site')),
('resolved_action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_alert_template', to='scripts.script')),
],
),
]

View File

@@ -0,0 +1,31 @@
# Generated by Django 3.1.4 on 2021-02-12 17:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0004_auto_20210212_1408'),
]
operations = [
migrations.RemoveField(
model_name='alert',
name='action_timeout',
),
migrations.RemoveField(
model_name='alert',
name='resolved_action_timeout',
),
migrations.AddField(
model_name='alerttemplate',
name='action_timeout',
field=models.PositiveIntegerField(default=15),
),
migrations.AddField(
model_name='alerttemplate',
name='resolved_action_timeout',
field=models.PositiveIntegerField(default=15),
),
]

View File

@@ -0,0 +1,72 @@
# Generated by Django 3.1.6 on 2021-02-17 17:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0005_auto_20210212_1745'),
]
operations = [
migrations.RemoveField(
model_name='alerttemplate',
name='agent_include_desktops',
),
migrations.AddField(
model_name='alerttemplate',
name='exclude_servers',
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='exclude_workstations',
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='agent_always_alert',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='agent_always_email',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='agent_always_text',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='check_always_alert',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='check_always_email',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='check_always_text',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='task_always_alert',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='task_always_email',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='task_always_text',
field=models.BooleanField(blank=True, default=None, null=True),
),
]

View File

@@ -1,5 +1,21 @@
from django.db import models
from __future__ import annotations
import re
from typing import TYPE_CHECKING, Union
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.db.models.fields import BooleanField, PositiveIntegerField
from django.utils import timezone as djangotime
from loguru import logger
if TYPE_CHECKING:
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
logger.configure(**settings.LOG_CONFIG)
SEVERITY_CHOICES = [
("info", "Informational"),
@@ -7,6 +23,13 @@ SEVERITY_CHOICES = [
("error", "Error"),
]
ALERT_TYPE_CHOICES = [
("availability", "Availability"),
("check", "Check"),
("task", "Task"),
("custom", "Custom"),
]
class Alert(models.Model):
agent = models.ForeignKey(
@@ -23,21 +46,584 @@ class Alert(models.Model):
null=True,
blank=True,
)
assigned_task = models.ForeignKey(
"autotasks.AutomatedTask",
related_name="alert",
on_delete=models.CASCADE,
null=True,
blank=True,
)
alert_type = models.CharField(
max_length=20, choices=ALERT_TYPE_CHOICES, default="availability"
)
message = models.TextField(null=True, blank=True)
alert_time = models.DateTimeField(auto_now_add=True, null=True)
alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True)
snoozed = models.BooleanField(default=False)
snooze_until = models.DateTimeField(null=True, blank=True)
resolved = models.BooleanField(default=False)
severity = models.CharField(
max_length=100, choices=SEVERITY_CHOICES, default="info"
resolved_on = models.DateTimeField(null=True, blank=True)
severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info")
email_sent = models.DateTimeField(null=True, blank=True)
resolved_email_sent = models.DateTimeField(null=True, blank=True)
sms_sent = models.DateTimeField(null=True, blank=True)
resolved_sms_sent = models.DateTimeField(null=True, blank=True)
hidden = models.BooleanField(default=False)
action_run = models.DateTimeField(null=True, blank=True)
action_stdout = models.TextField(null=True, blank=True)
action_stderr = models.TextField(null=True, blank=True)
action_retcode = models.IntegerField(null=True, blank=True)
action_execution_time = models.CharField(max_length=100, null=True, blank=True)
resolved_action_run = models.DateTimeField(null=True, blank=True)
resolved_action_stdout = models.TextField(null=True, blank=True)
resolved_action_stderr = models.TextField(null=True, blank=True)
resolved_action_retcode = models.IntegerField(null=True, blank=True)
resolved_action_execution_time = models.CharField(
max_length=100, null=True, blank=True
)
def __str__(self):
return self.message
def resolve(self):
self.resolved = True
self.resolved_on = djangotime.now()
self.snoozed = False
self.snooze_until = None
self.save()
@classmethod
def create_availability_alert(cls, agent):
pass
def create_or_return_availability_alert(cls, agent):
if not cls.objects.filter(agent=agent, resolved=False).exists():
return cls.objects.create(
agent=agent,
alert_type="availability",
severity="error",
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
hidden=True,
)
else:
return cls.objects.get(agent=agent, resolved=False)
@classmethod
def create_check_alert(cls, check):
pass
def create_or_return_check_alert(cls, check):
if not cls.objects.filter(assigned_check=check, resolved=False).exists():
return cls.objects.create(
assigned_check=check,
alert_type="check",
severity=check.alert_severity,
message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
hidden=True,
)
else:
return cls.objects.get(assigned_check=check, resolved=False)
@classmethod
def create_or_return_task_alert(cls, task):
if not cls.objects.filter(assigned_task=task, resolved=False).exists():
return cls.objects.create(
assigned_task=task,
alert_type="task",
severity=task.alert_severity,
message=f"{task.agent.hostname} has task: {task.name} that failed.",
hidden=True,
)
else:
return cls.objects.get(assigned_task=task, resolved=False)
@classmethod
def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
# set variables
dashboard_severities = None
email_severities = None
text_severities = None
always_dashboard = None
always_email = None
always_text = None
alert_interval = None
email_task = None
text_task = None
# check which type of instance was passed
if isinstance(instance, Agent):
from agents.tasks import agent_outage_email_task, agent_outage_sms_task
email_task = agent_outage_email_task
text_task = agent_outage_sms_task
email_alert = instance.overdue_email_alert
text_alert = instance.overdue_text_alert
dashboard_alert = instance.overdue_dashboard_alert
alert_template = instance.alert_template
maintenance_mode = instance.maintenance_mode
alert_severity = "error"
agent = instance
# set alert_template settings
if alert_template:
dashboard_severities = ["error"]
email_severities = ["error"]
text_severities = ["error"]
always_dashboard = alert_template.agent_always_alert
always_email = alert_template.agent_always_email
always_text = alert_template.agent_always_text
alert_interval = alert_template.agent_periodic_alert_days
if instance.should_create_alert(alert_template):
alert = cls.create_or_return_availability_alert(instance)
else:
# check if an open alert already exists
if cls.objects.filter(agent=instance, resolved=False).exists():
alert = cls.objects.get(agent=instance, resolved=False)
else:
alert = None
elif isinstance(instance, Check):
from checks.tasks import (
handle_check_email_alert_task,
handle_check_sms_alert_task,
)
email_task = handle_check_email_alert_task
text_task = handle_check_sms_alert_task
email_alert = instance.email_alert
text_alert = instance.text_alert
dashboard_alert = instance.dashboard_alert
alert_template = instance.agent.alert_template
maintenance_mode = instance.agent.maintenance_mode
alert_severity = instance.alert_severity
agent = instance.agent
# set alert_template settings
if alert_template:
dashboard_severities = alert_template.check_dashboard_alert_severity
email_severities = alert_template.check_email_alert_severity
text_severities = alert_template.check_text_alert_severity
always_dashboard = alert_template.check_always_alert
always_email = alert_template.check_always_email
always_text = alert_template.check_always_text
alert_interval = alert_template.check_periodic_alert_days
if instance.should_create_alert(alert_template):
alert = cls.create_or_return_check_alert(instance)
else:
# check if an open alert already exists
if cls.objects.filter(assigned_check=instance, resolved=False).exists():
alert = cls.objects.get(assigned_check=instance, resolved=False)
else:
alert = None
elif isinstance(instance, AutomatedTask):
from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert
email_task = handle_task_email_alert
text_task = handle_task_sms_alert
email_alert = instance.email_alert
text_alert = instance.text_alert
dashboard_alert = instance.dashboard_alert
alert_template = instance.agent.alert_template
maintenance_mode = instance.agent.maintenance_mode
alert_severity = instance.alert_severity
agent = instance.agent
# set alert_template settings
if alert_template:
dashboard_severities = alert_template.task_dashboard_alert_severity
email_severities = alert_template.task_email_alert_severity
text_severities = alert_template.task_text_alert_severity
always_dashboard = alert_template.task_always_alert
always_email = alert_template.task_always_email
always_text = alert_template.task_always_text
alert_interval = alert_template.task_periodic_alert_days
if instance.should_create_alert(alert_template):
alert = cls.create_or_return_task_alert(instance)
else:
# check if an open alert already exists
if cls.objects.filter(assigned_task=instance, resolved=False).exists():
alert = cls.objects.get(assigned_task=instance, resolved=False)
else:
alert = None
else:
return
# return if agent is in maintenance mode
if maintenance_mode or not alert:
return
# check if alert severity changed on check and update the alert
if alert_severity != alert.severity:
alert.severity = alert_severity
alert.save(update_fields=["severity"])
# create alert in dashboard if enabled
if dashboard_alert or always_dashboard:
# check if alert template is set and specific severities are configured
if alert_template and alert.severity not in dashboard_severities: # type: ignore
pass
else:
alert.hidden = False
alert.save()
# send email if enabled
if email_alert or always_email:
# check if alert template is set and specific severities are configured
if alert_template and alert.severity not in email_severities: # type: ignore
pass
else:
email_task.delay(
pk=alert.pk,
alert_interval=alert_interval,
)
# send text if enabled
if text_alert or always_text:
# check if alert template is set and specific severities are configured
if alert_template and alert.severity not in text_severities: # type: ignore
pass
else:
text_task.delay(pk=alert.pk, alert_interval=alert_interval)
# check if any scripts should be run
if alert_template and alert_template.action and not alert.action_run:
r = agent.run_script(
scriptpk=alert_template.action.pk,
args=alert.parse_script_args(alert_template.action_args),
timeout=alert_template.action_timeout,
wait=True,
full=True,
run_on_any=True,
)
# command was successful
if type(r) == dict:
alert.action_retcode = r["retcode"]
alert.action_stdout = r["stdout"]
alert.action_stderr = r["stderr"]
alert.action_execution_time = "{:.4f}".format(r["execution_time"])
alert.action_run = djangotime.now()
alert.save()
else:
logger.error(
f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
)
@classmethod
def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None:
from agents.models import Agent
from autotasks.models import AutomatedTask
from checks.models import Check
# set variables
email_on_resolved = False
text_on_resolved = False
resolved_email_task = None
resolved_text_task = None
# check which type of instance was passed
if isinstance(instance, Agent):
from agents.tasks import agent_recovery_email_task, agent_recovery_sms_task
resolved_email_task = agent_recovery_email_task
resolved_text_task = agent_recovery_sms_task
alert_template = instance.alert_template
alert = cls.objects.get(agent=instance, resolved=False)
maintenance_mode = instance.maintenance_mode
agent = instance
if alert_template:
email_on_resolved = alert_template.agent_email_on_resolved
text_on_resolved = alert_template.agent_text_on_resolved
elif isinstance(instance, Check):
from checks.tasks import (
handle_resolved_check_email_alert_task,
handle_resolved_check_sms_alert_task,
)
resolved_email_task = handle_resolved_check_email_alert_task
resolved_text_task = handle_resolved_check_sms_alert_task
alert_template = instance.agent.alert_template
alert = cls.objects.get(assigned_check=instance, resolved=False)
maintenance_mode = instance.agent.maintenance_mode
agent = instance.agent
if alert_template:
email_on_resolved = alert_template.check_email_on_resolved
text_on_resolved = alert_template.check_text_on_resolved
elif isinstance(instance, AutomatedTask):
from autotasks.tasks import (
handle_resolved_task_email_alert,
handle_resolved_task_sms_alert,
)
resolved_email_task = handle_resolved_task_email_alert
resolved_text_task = handle_resolved_task_sms_alert
alert_template = instance.agent.alert_template
alert = cls.objects.get(assigned_task=instance, resolved=False)
maintenance_mode = instance.agent.maintenance_mode
agent = instance.agent
if alert_template:
email_on_resolved = alert_template.task_email_on_resolved
text_on_resolved = alert_template.task_text_on_resolved
else:
return
# return if agent is in maintenance mode
if maintenance_mode:
return
alert.resolve()
# check if a resolved email notification should be sent
if email_on_resolved and not alert.resolved_email_sent:
resolved_email_task.delay(pk=alert.pk)
# check if resolved text should be sent
if text_on_resolved and not alert.resolved_sms_sent:
resolved_text_task.delay(pk=alert.pk)
# check if resolved script should be run
if (
alert_template
and alert_template.resolved_action
and not alert.resolved_action_run
):
r = agent.run_script(
scriptpk=alert_template.resolved_action.pk,
args=alert.parse_script_args(alert_template.resolved_action_args),
timeout=alert_template.resolved_action_timeout,
wait=True,
full=True,
run_on_any=True,
)
# command was successful
if type(r) == dict:
alert.resolved_action_retcode = r["retcode"]
alert.resolved_action_stdout = r["stdout"]
alert.resolved_action_stderr = r["stderr"]
alert.resolved_action_execution_time = "{:.4f}".format(
r["execution_time"]
)
alert.resolved_action_run = djangotime.now()
alert.save()
else:
logger.error(
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
)
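handle_alert_failure and handle_alert_resolve are classmethods taking an Agent, Check or AutomatedTask; their callers live outside this diff, but a check result handler would presumably use them along these lines (assuming the check exposes a passing/failing status):
from alerts.models import Alert


def handle_check_result(check) -> None:
    # illustrative call sites only
    if check.status == "failing":
        Alert.handle_alert_failure(check)   # create/refresh the alert, notify, run the failure action
    elif Alert.objects.filter(assigned_check=check, resolved=False).exists():
        Alert.handle_alert_resolve(check)   # resolve it, notify, run the resolved action
The exists() guard matters because handle_alert_resolve fetches the open alert with objects.get(), which raises if there is nothing to resolve.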
def parse_script_args(self, args: list[str]):
if not args:
return []
temp_args = list()
# pattern to match for injection
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")
for arg in args:
match = pattern.match(arg)
if match:
name = match.group(1)
if hasattr(self, name):
value = getattr(self, name)
else:
continue
try:
temp_args.append(re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)) # type: ignore
except Exception as e:
logger.error(e)
continue
else:
temp_args.append(arg)
return temp_args
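parse_script_args resolves {{alert.<field>}} placeholders against the alert instance itself and quotes the substituted value; a quick illustration with an unsaved Alert (values made up):
alert = Alert(message="CPU overload on WS01", severity="error")
alert.parse_script_args(["-Severity", "{{alert.severity}}", "-Msg {{alert.message}}"])
# -> ["-Severity", "'error'", "-Msg 'CPU overload on WS01'"]
# a placeholder naming an unknown field, e.g. {{alert.bogus}}, is silently dropped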
class AlertTemplate(models.Model):
name = models.CharField(max_length=100)
is_active = models.BooleanField(default=True)
action = models.ForeignKey(
"scripts.Script",
related_name="alert_template",
blank=True,
null=True,
on_delete=models.SET_NULL,
)
action_args = ArrayField(
models.CharField(max_length=255, null=True, blank=True),
null=True,
blank=True,
default=list,
)
action_timeout = models.PositiveIntegerField(default=15)
resolved_action = models.ForeignKey(
"scripts.Script",
related_name="resolved_alert_template",
blank=True,
null=True,
on_delete=models.SET_NULL,
)
resolved_action_args = ArrayField(
models.CharField(max_length=255, null=True, blank=True),
null=True,
blank=True,
default=list,
)
resolved_action_timeout = models.PositiveIntegerField(default=15)
# overrides the global recipients
email_recipients = ArrayField(
models.CharField(max_length=100, blank=True),
null=True,
blank=True,
default=list,
)
text_recipients = ArrayField(
models.CharField(max_length=100, blank=True),
null=True,
blank=True,
default=list,
)
# overrides the from address
email_from = models.EmailField(blank=True, null=True)
# agent alert settings
agent_email_on_resolved = BooleanField(null=True, blank=True, default=False)
agent_text_on_resolved = BooleanField(null=True, blank=True, default=False)
agent_always_email = BooleanField(null=True, blank=True, default=None)
agent_always_text = BooleanField(null=True, blank=True, default=None)
agent_always_alert = BooleanField(null=True, blank=True, default=None)
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
# check alert settings
check_email_alert_severity = ArrayField(
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
blank=True,
default=list,
)
check_text_alert_severity = ArrayField(
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
blank=True,
default=list,
)
check_dashboard_alert_severity = ArrayField(
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
blank=True,
default=list,
)
check_email_on_resolved = BooleanField(null=True, blank=True, default=False)
check_text_on_resolved = BooleanField(null=True, blank=True, default=False)
check_always_email = BooleanField(null=True, blank=True, default=None)
check_always_text = BooleanField(null=True, blank=True, default=None)
check_always_alert = BooleanField(null=True, blank=True, default=None)
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
# task alert settings
task_email_alert_severity = ArrayField(
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
blank=True,
default=list,
)
task_text_alert_severity = ArrayField(
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
blank=True,
default=list,
)
task_dashboard_alert_severity = ArrayField(
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
blank=True,
default=list,
)
task_email_on_resolved = BooleanField(null=True, blank=True, default=False)
task_text_on_resolved = BooleanField(null=True, blank=True, default=False)
task_always_email = BooleanField(null=True, blank=True, default=None)
task_always_text = BooleanField(null=True, blank=True, default=None)
task_always_alert = BooleanField(null=True, blank=True, default=None)
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
# exclusion settings
exclude_workstations = BooleanField(null=True, blank=True, default=False)
exclude_servers = BooleanField(null=True, blank=True, default=False)
excluded_sites = models.ManyToManyField(
"clients.Site", related_name="alert_exclusions", blank=True
)
excluded_clients = models.ManyToManyField(
"clients.Client", related_name="alert_exclusions", blank=True
)
excluded_agents = models.ManyToManyField(
"agents.Agent", related_name="alert_exclusions", blank=True
)
def __str__(self):
return self.name
@property
def has_agent_settings(self) -> bool:
return (
self.agent_email_on_resolved
or self.agent_text_on_resolved
or self.agent_always_email
or self.agent_always_text
or self.agent_always_alert
or bool(self.agent_periodic_alert_days)
)
@property
def has_check_settings(self) -> bool:
return (
bool(self.check_email_alert_severity)
or bool(self.check_text_alert_severity)
or bool(self.check_dashboard_alert_severity)
or self.check_email_on_resolved
or self.check_text_on_resolved
or self.check_always_email
or self.check_always_text
or self.check_always_alert
or bool(self.check_periodic_alert_days)
)
@property
def has_task_settings(self) -> bool:
return (
bool(self.task_email_alert_severity)
or bool(self.task_text_alert_severity)
or bool(self.task_dashboard_alert_severity)
or self.task_email_on_resolved
or self.task_text_on_resolved
or self.task_always_email
or self.task_always_text
or self.task_always_alert
or bool(self.task_periodic_alert_days)
)
@property
def has_core_settings(self) -> bool:
return bool(self.email_from) or bool(self.email_recipients) or bool(self.text_recipients)
@property
def is_default_template(self) -> bool:
return self.default_alert_template.exists() # type: ignore
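The has_*_settings properties simply report whether anything in that group of overrides is set; the serializer below surfaces them as agent_settings, check_settings, task_settings and core_settings. For example (unsaved template, illustrative values):
template = AlertTemplate(name="Default", email_from="alerts@example.com")
template.has_core_settings   # True  - an email override is set
template.has_agent_settings  # False - nothing in the agent group is set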

View File

@@ -1,19 +1,121 @@
from rest_framework.serializers import (
ModelSerializer,
ReadOnlyField,
DateTimeField,
)
from rest_framework.fields import SerializerMethodField
from rest_framework.serializers import ModelSerializer, ReadOnlyField
from .models import Alert
from automation.serializers import PolicySerializer
from clients.serializers import ClientSerializer, SiteSerializer
from tacticalrmm.utils import get_default_timezone
from .models import Alert, AlertTemplate
class AlertSerializer(ModelSerializer):
hostname = ReadOnlyField(source="agent.hostname")
client = ReadOnlyField(source="agent.client")
site = ReadOnlyField(source="agent.site")
alert_time = DateTimeField(format="iso-8601")
hostname = SerializerMethodField(read_only=True)
client = SerializerMethodField(read_only=True)
site = SerializerMethodField(read_only=True)
alert_time = SerializerMethodField(read_only=True)
resolve_on = SerializerMethodField(read_only=True)
snoozed_until = SerializerMethodField(read_only=True)
def get_hostname(self, instance):
if instance.alert_type == "availability":
return instance.agent.hostname if instance.agent else ""
elif instance.alert_type == "check":
return (
instance.assigned_check.agent.hostname
if instance.assigned_check
else ""
)
elif instance.alert_type == "task":
return (
instance.assigned_task.agent.hostname if instance.assigned_task else ""
)
else:
return ""
def get_client(self, instance):
if instance.alert_type == "availability":
return instance.agent.client.name if instance.agent else ""
elif instance.alert_type == "check":
return (
instance.assigned_check.agent.client.name
if instance.assigned_check
else ""
)
elif instance.alert_type == "task":
return (
instance.assigned_task.agent.client.name
if instance.assigned_task
else ""
)
else:
return ""
def get_site(self, instance):
if instance.alert_type == "availability":
return instance.agent.site.name if instance.agent else ""
elif instance.alert_type == "check":
return (
instance.assigned_check.agent.site.name
if instance.assigned_check
else ""
)
elif instance.alert_type == "task":
return (
instance.assigned_task.agent.site.name if instance.assigned_task else ""
)
else:
return ""
def get_alert_time(self, instance):
if instance.alert_time:
return instance.alert_time.astimezone(get_default_timezone()).timestamp()
else:
return None
def get_resolve_on(self, instance):
if instance.resolved_on:
return instance.resolved_on.astimezone(get_default_timezone()).timestamp()
else:
return None
def get_snoozed_until(self, instance):
if instance.snooze_until:
return instance.snooze_until.astimezone(get_default_timezone()).timestamp()
return None
class Meta:
model = Alert
fields = "__all__"
fields = "__all__"
class AlertTemplateSerializer(ModelSerializer):
agent_settings = ReadOnlyField(source="has_agent_settings")
check_settings = ReadOnlyField(source="has_check_settings")
task_settings = ReadOnlyField(source="has_task_settings")
core_settings = ReadOnlyField(source="has_core_settings")
default_template = ReadOnlyField(source="is_default_template")
action_name = ReadOnlyField(source="action.name")
resolved_action_name = ReadOnlyField(source="resolved_action.name")
applied_count = SerializerMethodField()
class Meta:
model = AlertTemplate
fields = "__all__"
def get_applied_count(self, instance):
count = 0
count += instance.policies.count()
count += instance.clients.count()
count += instance.sites.count()
return count
class AlertTemplateRelationSerializer(ModelSerializer):
policies = PolicySerializer(read_only=True, many=True)
clients = ClientSerializer(read_only=True, many=True)
sites = SiteSerializer(read_only=True, many=True)
class Meta:
model = AlertTemplate
fields = "__all__"

View File

@@ -0,0 +1,24 @@
from django.utils import timezone as djangotime
from alerts.models import Alert
from tacticalrmm.celery import app
@app.task
def unsnooze_alerts() -> str:
Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
snoozed=False, snooze_until=None
)
return "ok"
@app.task
def cache_agents_alert_template():
from agents.models import Agent
for agent in Agent.objects.only("pk"):
agent.set_alert_template()
return "ok"

File diff suppressed because it is too large

View File

@@ -1,7 +1,12 @@
from django.urls import path
from . import views
urlpatterns = [
path("alerts/", views.GetAddAlerts.as_view()),
path("bulk/", views.BulkAlerts.as_view()),
path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
path("alerttemplates/", views.GetAddAlertTemplates.as_view()),
path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
]

View File

@@ -1,19 +1,104 @@
from datetime import datetime as dt
from django.db.models import Q
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from django.utils import timezone as djangotime
from rest_framework.response import Response
from rest_framework import status
from rest_framework.views import APIView
from .models import Alert
from tacticalrmm.utils import notify_error
from .serializers import AlertSerializer
from .models import Alert, AlertTemplate
from .serializers import (
AlertSerializer,
AlertTemplateRelationSerializer,
AlertTemplateSerializer,
)
from .tasks import cache_agents_alert_template
class GetAddAlerts(APIView):
def get(self, request):
alerts = Alert.objects.all()
def patch(self, request):
return Response(AlertSerializer(alerts, many=True).data)
# top 10 alerts for dashboard icon
if "top" in request.data.keys():
alerts = Alert.objects.filter(
resolved=False, snoozed=False, hidden=False
).order_by("alert_time")[: int(request.data["top"])]
count = Alert.objects.filter(
resolved=False, snoozed=False, hidden=False
).count()
return Response(
{
"alerts_count": count,
"alerts": AlertSerializer(alerts, many=True).data,
}
)
elif any(
key
in [
"timeFilter",
"clientFilter",
"severityFilter",
"resolvedFilter",
"snoozedFilter",
]
for key in request.data.keys()
):
clientFilter = Q()
severityFilter = Q()
timeFilter = Q()
resolvedFilter = Q()
snoozedFilter = Q()
if (
"snoozedFilter" in request.data.keys()
and not request.data["snoozedFilter"]
):
snoozedFilter = Q(snoozed=request.data["snoozedFilter"])
if (
"resolvedFilter" in request.data.keys()
and not request.data["resolvedFilter"]
):
resolvedFilter = Q(resolved=request.data["resolvedFilter"])
if "clientFilter" in request.data.keys():
from agents.models import Agent
from clients.models import Client
clients = Client.objects.filter(
pk__in=request.data["clientFilter"]
).values_list("id")
agents = Agent.objects.filter(site__client_id__in=clients).values_list(
"id"
)
clientFilter = Q(agent__in=agents)
if "severityFilter" in request.data.keys():
severityFilter = Q(severity__in=request.data["severityFilter"])
if "timeFilter" in request.data.keys():
timeFilter = Q(
alert_time__lte=djangotime.make_aware(dt.today()),
alert_time__gt=djangotime.make_aware(dt.today())
- djangotime.timedelta(days=int(request.data["timeFilter"])),
)
alerts = (
Alert.objects.filter(clientFilter)
.filter(severityFilter)
.filter(resolvedFilter)
.filter(snoozedFilter)
.filter(timeFilter)
)
return Response(AlertSerializer(alerts, many=True).data)
else:
alerts = Alert.objects.all()
return Response(AlertSerializer(alerts, many=True).data)
def post(self, request):
serializer = AlertSerializer(data=request.data, partial=True)
@@ -32,7 +117,40 @@ class GetUpdateDeleteAlert(APIView):
def put(self, request, pk):
alert = get_object_or_404(Alert, pk=pk)
serializer = AlertSerializer(instance=alert, data=request.data, partial=True)
data = request.data
if "type" in data.keys():
if data["type"] == "resolve":
data = {
"resolved": True,
"resolved_on": djangotime.now(),
"snoozed": False,
}
# unable to set snooze_until to none in serializer
alert.snooze_until = None
alert.save()
elif data["type"] == "snooze":
if "snooze_days" in data.keys():
data = {
"snoozed": True,
"snooze_until": djangotime.now()
+ djangotime.timedelta(days=int(data["snooze_days"])),
}
else:
return notify_error(
"Missing 'snoozed_days' when trying to snooze alert"
)
elif data["type"] == "unsnooze":
data = {"snoozed": False}
# unable to set snooze_until to none in serializer
alert.snooze_until = None
alert.save()
else:
return notify_error("There was an error in the request data")
serializer = AlertSerializer(instance=alert, data=data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
@@ -42,3 +160,77 @@ class GetUpdateDeleteAlert(APIView):
Alert.objects.get(pk=pk).delete()
return Response("ok")
class BulkAlerts(APIView):
def post(self, request):
if request.data["bulk_action"] == "resolve":
Alert.objects.filter(id__in=request.data["alerts"]).update(
resolved=True,
resolved_on=djangotime.now(),
snoozed=False,
snooze_until=None,
)
return Response("ok")
elif request.data["bulk_action"] == "snooze":
if "snooze_days" in request.data.keys():
Alert.objects.filter(id__in=request.data["alerts"]).update(
snoozed=True,
snooze_until=djangotime.now()
+ djangotime.timedelta(days=int(request.data["snooze_days"])),
)
return Response("ok")
return notify_error("The request was invalid")
class GetAddAlertTemplates(APIView):
def get(self, request):
alert_templates = AlertTemplate.objects.all()
return Response(AlertTemplateSerializer(alert_templates, many=True).data)
def post(self, request):
serializer = AlertTemplateSerializer(data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
# cache alert_template value on agents
cache_agents_alert_template.delay()
return Response("ok")
class GetUpdateDeleteAlertTemplate(APIView):
def get(self, request, pk):
alert_template = get_object_or_404(AlertTemplate, pk=pk)
return Response(AlertTemplateSerializer(alert_template).data)
def put(self, request, pk):
alert_template = get_object_or_404(AlertTemplate, pk=pk)
serializer = AlertTemplateSerializer(
instance=alert_template, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save()
# cache alert_template value on agents
cache_agents_alert_template.delay()
return Response("ok")
def delete(self, request, pk):
get_object_or_404(AlertTemplate, pk=pk).delete()
# cache alert_template value on agents
cache_agents_alert_template.delay()
return Response("ok")
class RelatedAlertTemplate(APIView):
def get(self, request, pk):
alert_template = get_object_or_404(AlertTemplate, pk=pk)
return Response(AlertTemplateRelationSerializer(alert_template).data)

View File

@@ -1,5 +0,0 @@
from django.apps import AppConfig
class ApiConfig(AppConfig):
name = "api"

View File

@@ -1,11 +0,0 @@
from django.urls import path
from . import views
from apiv3 import views as v3_views
urlpatterns = [
path("triggerpatchscan/", views.trigger_patch_scan),
path("<int:pk>/checkrunner/", views.CheckRunner.as_view()),
path("<int:pk>/taskrunner/", views.TaskRunner.as_view()),
path("<int:pk>/saltinfo/", views.SaltInfo.as_view()),
path("<int:pk>/meshinfo/", v3_views.MeshInfo.as_view()),
]

View File

@@ -1,149 +0,0 @@
from loguru import logger
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import (
api_view,
authentication_classes,
permission_classes,
)
from agents.models import Agent
from checks.models import Check
from autotasks.models import AutomatedTask
from winupdate.tasks import check_for_updates_task
from autotasks.serializers import TaskRunnerGetSerializer, TaskRunnerPatchSerializer
from checks.serializers import CheckRunnerGetSerializer, CheckResultsSerializer
logger.configure(**settings.LOG_CONFIG)
@api_view(["PATCH"])
@authentication_classes((TokenAuthentication,))
@permission_classes((IsAuthenticated,))
def trigger_patch_scan(request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
reboot_policy = agent.get_patch_policy().reboot_after_install
reboot = False
if reboot_policy == "always":
reboot = True
if request.data["reboot"]:
if reboot_policy == "required":
reboot = True
elif reboot_policy == "never":
agent.needs_reboot = True
agent.save(update_fields=["needs_reboot"])
if reboot:
r = agent.salt_api_cmd(
timeout=15,
func="system.reboot",
arg=7,
kwargs={"in_seconds": True},
)
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
else:
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
else:
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
return Response("ok")
class CheckRunner(APIView):
"""
For windows agent
"""
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
checks = Check.objects.filter(agent__pk=pk, overriden_by_policy=False)
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializer(checks, many=True).data,
}
return Response(ret)
def patch(self, request, pk):
check = get_object_or_404(Check, pk=pk)
if check.check_type != "cpuload" and check.check_type != "memory":
serializer = CheckResultsSerializer(
instance=check, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save(last_run=djangotime.now())
else:
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
check.handle_check(request.data)
return Response("ok")
class TaskRunner(APIView):
"""
For the windows python agent
"""
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk):
task = get_object_or_404(AutomatedTask, pk=pk)
return Response(TaskRunnerGetSerializer(task).data)
def patch(self, request, pk):
task = get_object_or_404(AutomatedTask, pk=pk)
serializer = TaskRunnerPatchSerializer(
instance=task, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save(last_run=djangotime.now())
return Response("ok")
class SaltInfo(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
ret = {
"latestVer": settings.LATEST_SALT_VER,
"currentVer": agent.salt_ver,
"salt_id": agent.salt_id,
}
return Response(ret)
def patch(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
agent.salt_ver = request.data["ver"]
agent.save(update_fields=["salt_ver"])
return Response("ok")

View File

@@ -1,5 +0,0 @@
from django.apps import AppConfig
class Apiv2Config(AppConfig):
name = 'apiv2'

View File

@@ -1,38 +0,0 @@
from tacticalrmm.test import TacticalTestCase
from unittest.mock import patch
from model_bakery import baker
from itertools import cycle
class TestAPIv2(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
@patch("agents.models.Agent.salt_api_cmd")
def test_sync_modules(self, mock_ret):
# setup data
agent = baker.make_recipe("agents.agent")
url = "/api/v2/saltminion/"
payload = {"agent_id": agent.agent_id}
mock_ret.return_value = "error"
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 400)
mock_ret.return_value = []
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "Modules are already in sync")
mock_ret.return_value = ["modules.win_agent"]
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "Successfully synced salt modules")
mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "Successfully synced salt modules")
self.check_not_authenticated("patch", url)

View File

@@ -1,14 +0,0 @@
from django.urls import path
from . import views
from apiv3 import views as v3_views
urlpatterns = [
path("newagent/", v3_views.NewAgent.as_view()),
path("meshexe/", v3_views.MeshExe.as_view()),
path("saltminion/", v3_views.SaltMinion.as_view()),
path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
path("sysinfo/", v3_views.SysInfo.as_view()),
path("hello/", v3_views.Hello.as_view()),
path("checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
]

View File

@@ -1,41 +0,0 @@
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from agents.models import Agent
from checks.models import Check
from checks.serializers import CheckRunnerGetSerializerV2
class CheckRunner(APIView):
"""
For the windows python agent
"""
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
agent.last_seen = djangotime.now()
agent.save(update_fields=["last_seen"])
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializerV2(checks, many=True).data,
}
return Response(ret)
def patch(self, request):
check = get_object_or_404(Check, pk=request.data["id"])
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
status = check.handle_checkv2(request.data)
return Response(status)

View File

@@ -1,11 +1,13 @@
import os
import json
import os
from unittest.mock import patch
from django.conf import settings
from tacticalrmm.test import TacticalTestCase
from unittest.mock import patch
from django.utils import timezone as djangotime
from model_bakery import baker
from itertools import cycle
from autotasks.models import AutomatedTask
from tacticalrmm.test import TacticalTestCase
class TestAPIv3(TacticalTestCase):
@@ -17,8 +19,44 @@ class TestAPIv3(TacticalTestCase):
def test_get_checks(self):
url = f"/api/v3/{self.agent.agent_id}/checkrunner/"
# add a check
check1 = baker.make_recipe("checks.ping_check", agent=self.agent)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], self.agent.check_interval) # type: ignore
self.assertEqual(len(r.data["checks"]), 1) # type: ignore
# override check run interval
check2 = baker.make_recipe(
"checks.ping_check", agent=self.agent, run_interval=20
)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], 20) # type: ignore
self.assertEqual(len(r.data["checks"]), 2) # type: ignore
# Set last_run on both checks; the endpoint should then return an empty list
check1.last_run = djangotime.now()
check1.save()
check2.last_run = djangotime.now()
check2.save()
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], 20) # type: ignore
self.assertFalse(r.data["checks"]) # type: ignore
# set last_run greater than interval
check1.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
check1.save()
check2.last_run = djangotime.now() - djangotime.timedelta(seconds=200)
check2.save()
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["check_interval"], 20) # type: ignore
self.assertEqual(len(r.data["checks"]), 2) # type: ignore
url = "/api/v3/Maj34ACb324j234asdj2n34kASDjh34-DESKTOPTEST123/checkrunner/"
r = self.client.get(url)
@@ -26,46 +64,10 @@ class TestAPIv3(TacticalTestCase):
self.check_not_authenticated("get", url)
def test_get_salt_minion(self):
url = f"/api/v3/{self.agent.agent_id}/saltminion/"
url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertIn("latestVer", r.json().keys())
self.assertIn("currentVer", r.json().keys())
self.assertIn("salt_id", r.json().keys())
self.assertIn("downloadURL", r.json().keys())
r2 = self.client.get(url2)
self.assertEqual(r2.status_code, 200)
self.check_not_authenticated("get", url)
self.check_not_authenticated("get", url2)
def test_get_mesh_info(self):
url = f"/api/v3/{self.agent.pk}/meshinfo/"
url2 = f"/api/v1/{self.agent.pk}/meshinfo/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
r = self.client.get(url2)
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("get", url)
self.check_not_authenticated("get", url2)
def test_get_winupdater(self):
url = f"/api/v3/{self.agent.agent_id}/winupdater/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("get", url)
def test_sysinfo(self):
# TODO replace this with golang wmi sample data
url = f"/api/v3/sysinfo/"
url = "/api/v3/sysinfo/"
with open(
os.path.join(
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
@@ -80,19 +82,260 @@ class TestAPIv3(TacticalTestCase):
self.check_not_authenticated("patch", url)
def test_hello_patch(self):
url = f"/api/v3/hello/"
def test_checkrunner_interval(self):
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(
r.json(),
{"agent": self.agent.pk, "check_interval": self.agent.check_interval},
)
# add check to agent with check interval set
check = baker.make_recipe(
"checks.ping_check", agent=self.agent, run_interval=30
)
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(
r.json(),
{"agent": self.agent.pk, "check_interval": 30},
)
# minimum check run interval is 15 seconds
check = baker.make_recipe("checks.ping_check", agent=self.agent, run_interval=5)
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(
r.json(),
{"agent": self.agent.pk, "check_interval": 15},
)
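These assertions pin down the checkinterval behaviour: the effective interval is the smallest run_interval among the agent's checks, clamped to a 15-second floor, with the agent's own check_interval as the fallback. A server-side sketch consistent with the test, not the project's actual implementation (the agentchecks related name is an assumption):
def effective_check_interval(agent) -> int:
    intervals = [
        c.run_interval
        for c in agent.agentchecks.only("run_interval")  # assumed related name
        if c.run_interval
    ]
    if not intervals:
        return agent.check_interval
    return max(15, min(intervals))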
def test_run_checks(self):
# force run all checks regardless of interval
agent = baker.make_recipe("agents.online_agent")
baker.make_recipe("checks.ping_check", agent=agent)
baker.make_recipe("checks.diskspace_check", agent=agent)
baker.make_recipe("checks.cpuload_check", agent=agent)
baker.make_recipe("checks.memory_check", agent=agent)
baker.make_recipe("checks.eventlog_check", agent=agent)
for _ in range(10):
baker.make_recipe("checks.script_check", agent=agent)
url = f"/api/v3/{agent.agent_id}/runchecks/"
r = self.client.get(url)
self.assertEqual(r.json()["agent"], agent.pk)
self.assertIsInstance(r.json()["check_interval"], int)
self.assertEqual(len(r.json()["checks"]), 15)
def test_checkin_patch(self):
from logs.models import PendingAction
url = "/api/v3/checkin/"
agent_updated = baker.make_recipe("agents.agent", version="1.3.0")
PendingAction.objects.create(
agent=agent_updated,
action_type="agentupdate",
details={
"url": agent_updated.winagent_dl,
"version": agent_updated.version,
"inno": agent_updated.win_inno_exe,
},
)
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
self.assertEqual(action.status, "pending")
# test agent failed to update and still on same version
payload = {
"agent_id": self.agent.agent_id,
"logged_in_username": "None",
"disks": [],
"func": "hello",
"agent_id": agent_updated.agent_id,
"version": "1.3.0",
}
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
self.assertEqual(action.status, "pending")
# test agent successful update
payload["version"] = settings.LATEST_AGENT_VER
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
self.assertEqual(action.status, "completed")
action.delete()
@patch("apiv3.views.reload_nats")
def test_agent_recovery(self, reload_nats):
reload_nats.return_value = "ok"
r = self.client.get("/api/v3/34jahsdkjasncASDjhg2b3j4r/recover/")
self.assertEqual(r.status_code, 404)
agent = baker.make_recipe("agents.online_agent")
url = f"/api/v3/{agent.agent_id}/recovery/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.json(), {"mode": "pass", "shellcmd": ""})
reload_nats.assert_not_called()
baker.make("agents.RecoveryAction", agent=agent, mode="mesh")
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.json(), {"mode": "mesh", "shellcmd": ""})
reload_nats.assert_not_called()
baker.make(
"agents.RecoveryAction",
agent=agent,
mode="command",
command="shutdown /r /t 5 /f",
)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(
r.json(), {"mode": "command", "shellcmd": "shutdown /r /t 5 /f"}
)
reload_nats.assert_not_called()
baker.make("agents.RecoveryAction", agent=agent, mode="rpc")
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.json(), {"mode": "rpc", "shellcmd": ""})
reload_nats.assert_called_once()
def test_task_runner_get(self):
from autotasks.serializers import TaskGOGetSerializer
r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/")
self.assertEqual(r.status_code, 404)
# setup data
agent = baker.make_recipe("agents.agent")
task = baker.make("autotasks.AutomatedTask", agent=agent)
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(TaskGOGetSerializer(task).data, r.data) # type: ignore
def test_task_runner_results(self):
from agents.models import AgentCustomField
r = self.client.patch("/api/v3/500/asdf9df9dfdf/taskrunner/")
self.assertEqual(r.status_code, 404)
# setup data
agent = baker.make_recipe("agents.agent")
task = baker.make("autotasks.AutomatedTask", agent=agent)
url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore
# test passing task
data = {
"stdout": "test test \ntestest stdgsd\n",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "passing") # type: ignore
payload["logged_in_username"] = "Bob"
r = self.client.patch(url, payload, format="json")
# test failing task
data = {
"stdout": "test test \ntestest stdgsd\n",
"stderr": "",
"retcode": 1,
"execution_time": 3.560,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore
self.check_not_authenticated("patch", url)
# test collector task
text = baker.make("core.CustomField", model="agent", type="text", name="Test")
boolean = baker.make(
"core.CustomField", model="agent", type="checkbox", name="Test1"
)
multiple = baker.make(
"core.CustomField", model="agent", type="multiple", name="Test2"
)
# test text fields
task.custom_field = text # type: ignore
task.save() # type: ignore
# test failing with stderr
data = {
"stdout": "test test \nthe last line",
"stderr": "This is an error",
"retcode": 1,
"execution_time": 3.560,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore
# test saving to text field
data = {
"stdout": "test test \nthe last line",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=text, agent=task.agent).value, "the last line") # type: ignore
# test saving to checkbox field
task.custom_field = boolean # type: ignore
task.save() # type: ignore
data = {
"stdout": "1",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertTrue(AgentCustomField.objects.get(field=boolean, agent=task.agent).value) # type: ignore
# test saving to multiple field with commas
task.custom_field = multiple # type: ignore
task.save() # type: ignore
data = {
"stdout": "this,is,an,array",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this", "is", "an", "array"]) # type: ignore
# test multiple with a single value
data = {
"stdout": "this",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this"]) # type: ignore

apiv3/urls.py

@@ -1,17 +1,23 @@
from django.urls import path
from . import views
urlpatterns = [
path("hello/", views.Hello.as_view()),
path("checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/runchecks/", views.RunChecks.as_view()),
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
path("saltminion/", views.SaltMinion.as_view()),
path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
path("meshexe/", views.MeshExe.as_view()),
path("sysinfo/", views.SysInfo.as_view()),
path("newagent/", views.NewAgent.as_view()),
path("winupdater/", views.WinUpdater.as_view()),
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
path("software/", views.Software.as_view()),
path("installer/", views.Installer.as_view()),
path("checkin/", views.CheckIn.as_view()),
path("syncmesh/", views.SyncMeshNodeID.as_view()),
path("choco/", views.Choco.as_view()),
path("winupdates/", views.WinUpdates.as_view()),
path("superseded/", views.SupersededWinUpdate.as_view()),
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
]
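These routes authenticate the agent with its DRF token, so a request without a valid Authorization header is rejected (the check_not_authenticated assertions in the tests above rely on this). A minimal agent-side sketch of calling the check-interval route, assuming a hypothetical RMM host, agent id, and token:

import requests

API = "https://api.example.com"  # hypothetical RMM API host
AGENT_ID = "h2kj34h5kjh34kjh"    # hypothetical agent_id
TOKEN = "changeme"               # token issued to the agent at install time

# DRF TokenAuthentication expects an "Authorization: Token <key>" header.
r = requests.get(
    f"{API}/api/v3/{AGENT_ID}/checkinterval/",
    headers={"Authorization": f"Token {TOKEN}"},
    timeout=15,
)
r.raise_for_status()
print(r.json())  # e.g. {"agent": 1, "check_interval": 120}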

apiv3/views.py

@@ -1,69 +1,94 @@
import asyncio
import os
import time

import requests
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from loguru import logger
from packaging import version as pyver
from rest_framework import serializers
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from accounts.models import User
from agents.models import Agent, AgentCustomField
from agents.serializers import WinAgentSerializer
from agents.tasks import (
    agent_recovery_email_task,
    agent_recovery_sms_task,
    get_wmi_detail_task,
    sync_salt_modules_task,
)
from autotasks.models import AutomatedTask
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
from checks.models import Check
from checks.serializers import CheckRunnerGetSerializer, CheckRunnerGetSerializerV3
from checks.utils import bytes2human
from logs.models import PendingAction
from software.models import InstalledSoftware
from software.tasks import get_installed_software, install_chocolatey
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
from winupdate.models import WinUpdate, WinUpdatePolicy
from winupdate.serializers import ApprovedUpdateSerializer
from winupdate.tasks import check_for_updates_task
logger.configure(**settings.LOG_CONFIG)
class Hello(APIView):
"""
The agent's checkin endpoint
patch: called every 30 to 120 seconds
post: called on agent windows service startup
"""
class CheckIn(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def patch(self, request):
from alerts.models import Alert
updated = False
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
if pyver.parse(request.data["version"]) > pyver.parse(
agent.version
) or pyver.parse(request.data["version"]) == pyver.parse(
settings.LATEST_AGENT_VER
):
updated = True
agent.version = request.data["version"]
agent.last_seen = djangotime.now()
agent.save(update_fields=["version", "last_seen"])
# change agent update pending status to completed if agent has just updated
if (
updated
and agent.pendingactions.filter( # type: ignore
action_type="agentupdate", status="pending"
).exists()
):
agent.pendingactions.filter( # type: ignore
action_type="agentupdate", status="pending"
).update(status="completed")
# handles any alerting actions
if Alert.objects.filter(agent=agent, resolved=False).exists():
Alert.handle_alert_resolve(agent)
# sync scheduled tasks
if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore
for task in tasks:
if task.sync_status == "pendingdeletion":
task.delete_task_on_agent()
elif task.sync_status == "initial":
task.modify_task_on_agent()
elif task.sync_status == "notsynced":
task.create_task_on_agent()
return Response("ok")
def put(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
disks = request.data["disks"]
new = []
# python agent
if isinstance(disks, dict):
for k, v in disks.items():
new.append(v)
else:
# golang agent
if request.data["func"] == "disks":
disks = request.data["disks"]
new = []
for disk in disks:
tmp = {}
for _, _ in disk.items():
tmp["device"] = disk["device"]
tmp["fstype"] = disk["fstype"]
tmp["total"] = bytes2human(disk["total"])
@@ -72,104 +97,249 @@ class Hello(APIView):
tmp["percent"] = int(disk["percent"])
new.append(tmp)
if request.data["logged_in_username"] == "None":
serializer.save(last_seen=djangotime.now(), disks=new)
else:
serializer.save(
    last_seen=djangotime.now(),
    disks=new,
    last_logged_in_user=request.data["logged_in_username"],
)
serializer.is_valid(raise_exception=True)
serializer.save(disks=new)
return Response("ok")
if request.data["func"] == "loggedonuser":
if request.data["logged_in_username"] != "None":
serializer.is_valid(raise_exception=True)
serializer.save(last_logged_in_user=request.data["logged_in_username"])
return Response("ok")
if request.data["func"] == "software":
raw: SoftwareList = request.data["software"]
if not isinstance(raw, list):
return notify_error("err")
sw = filter_software(raw)
if not InstalledSoftware.objects.filter(agent=agent).exists():
InstalledSoftware(agent=agent, software=sw).save()
else:
s = agent.installedsoftware_set.first() # type: ignore
s.software = sw
s.save(update_fields=["software"])
return Response("ok")
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("ok")
# called once during tacticalagent windows service startup
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
if not agent.choco_installed:
asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
time.sleep(0.5)
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
return Response("ok")
class SyncMeshNodeID(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
if agent.mesh_node_id != request.data["nodeid"]:
agent.mesh_node_id = request.data["nodeid"]
agent.save(update_fields=["mesh_node_id"])
return Response("ok")
class Choco(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent.choco_installed = request.data["installed"]
agent.save(update_fields=["choco_installed"])
return Response("ok")
class WinUpdates(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def put(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
reboot_policy: str = agent.get_patch_policy().reboot_after_install
reboot = False
if reboot_policy == "always":
reboot = True
if request.data["needs_reboot"]:
if reboot_policy == "required":
reboot = True
elif reboot_policy == "never":
agent.needs_reboot = True
agent.save(update_fields=["needs_reboot"])
if reboot:
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
agent.delete_superseded_updates()
return Response("ok")
def patch(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
u = agent.winupdates.filter(guid=request.data["guid"]).last() # type: ignore
success: bool = request.data["success"]
if success:
u.result = "success"
u.downloaded = True
u.installed = True
u.date_installed = djangotime.now()
u.save(
update_fields=[
"result",
"downloaded",
"installed",
"date_installed",
]
)
else:
u.result = "failed"
u.save(update_fields=["result"])
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
last_outage = agent.agentoutages.last()
last_outage.recovery_time = djangotime.now()
last_outage.save(update_fields=["recovery_time"])
if agent.overdue_email_alert:
agent_recovery_email_task.delay(pk=last_outage.pk)
if agent.overdue_text_alert:
agent_recovery_sms_task.delay(pk=last_outage.pk)
recovery = agent.recoveryactions.filter(last_run=None).last()
if recovery is not None:
recovery.last_run = djangotime.now()
recovery.save(update_fields=["last_run"])
return Response(recovery.send())
# get any pending actions
if agent.pendingactions.filter(status="pending").exists():
agent.handle_pending_actions()
agent.delete_superseded_updates()
return Response("ok")
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
updates = request.data["wua_updates"]
for update in updates:
if agent.winupdates.filter(guid=update["guid"]).exists(): # type: ignore
u = agent.winupdates.filter(guid=update["guid"]).last() # type: ignore
u.downloaded = update["downloaded"]
u.installed = update["installed"]
u.save(update_fields=["downloaded", "installed"])
else:
try:
kb = "KB" + update["kb_article_ids"][0]
except:
continue
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save(last_seen=djangotime.now())
WinUpdate(
agent=agent,
guid=update["guid"],
kb=kb,
title=update["title"],
installed=update["installed"],
downloaded=update["downloaded"],
description=update["description"],
severity=update["severity"],
categories=update["categories"],
category_ids=update["category_ids"],
kb_article_ids=update["kb_article_ids"],
more_info_urls=update["more_info_urls"],
support_url=update["support_url"],
revision_number=update["revision_number"],
).save()
sync_salt_modules_task.delay(agent.pk)
get_installed_software.delay(agent.pk)
get_wmi_detail_task.delay(agent.pk)
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
)
agent.delete_superseded_updates()
if not agent.choco_installed:
install_chocolatey.delay(agent.pk, wait=True)
# more superseded updates cleanup
if pyver.parse(agent.version) <= pyver.parse("1.4.2"):
for u in agent.winupdates.filter( # type: ignore
date_installed__isnull=True, result="failed"
).exclude(installed=True):
u.delete()
return Response("ok")
class CheckRunner(APIView):
"""
For the windows golang agent
"""
class SupersededWinUpdate(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
updates = agent.winupdates.filter(guid=request.data["guid"]) # type: ignore
for u in updates:
u.delete()
return Response("ok")
class RunChecks(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
agent.last_seen = djangotime.now()
agent.save(update_fields=["last_seen"])
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializerV3(checks, many=True).data,
"checks": CheckRunnerGetSerializer(checks, many=True).data,
}
return Response(ret)
class CheckRunner(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
checks = agent.agentchecks.filter(overriden_by_policy=False) # type: ignore
run_list = [
check
for check in checks
# always run if check hasn't run yet
if not check.last_run
# if a check interval is set, see if the correct amount of seconds have passed
or (
check.run_interval
and (
check.last_run
< djangotime.now()
- djangotime.timedelta(seconds=check.run_interval)
)
# if check interval isn't set, make sure the agent's check interval has passed before running
)
or (
check.last_run
< djangotime.now() - djangotime.timedelta(seconds=agent.check_interval)
)
]
ret = {
"agent": agent.pk,
"check_interval": agent.check_run_interval(),
"checks": CheckRunnerGetSerializer(run_list, many=True).data,
}
return Response(ret)
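The run_list comprehension gates each check on either its own run_interval or, failing that, the agent-wide check_interval. The same predicate written out as a standalone function (illustrative sketch, not code from this diff):

from datetime import datetime, timedelta

# A check is due if it has never run, if its own run_interval (when set) has
# elapsed, or if the agent-level check_interval has elapsed since last_run.
def check_is_due(last_run, run_interval: int, agent_interval: int, now: datetime) -> bool:
    if not last_run:
        return True
    if run_interval and last_run < now - timedelta(seconds=run_interval):
        return True
    return last_run < now - timedelta(seconds=agent_interval)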
def patch(self, request):
from logs.models import AuditLog
check = get_object_or_404(Check, pk=request.data["id"])
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
status = check.handle_checkv2(request.data)
# create audit entry
AuditLog.objects.create(
username=check.agent.hostname,
agent=check.agent.hostname,
object_type="agent",
action="check_run",
message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}",
after_value=Check.serialize(check),
)
return Response(status)
class TaskRunner(APIView):
"""
For the windows golang agent
"""
class CheckRunnerInterval(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
return Response(
{"agent": agent.pk, "check_interval": agent.check_run_interval()}
)
class TaskRunner(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
@@ -179,6 +349,7 @@ class TaskRunner(APIView):
return Response(TaskGOGetSerializer(task).data)
def patch(self, request, pk, agentid):
from alerts.models import Alert
from logs.models import AuditLog
agent = get_object_or_404(Agent, agent_id=agentid)
@@ -188,9 +359,51 @@ class TaskRunner(APIView):
instance=task, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
new_task = serializer.save(last_run=djangotime.now())
# check if task is a collector and update the custom field
if task.custom_field:
if not task.stderr:
if AgentCustomField.objects.filter(
field=task.custom_field, agent=task.agent
).exists():
agent_field = AgentCustomField.objects.get(
field=task.custom_field, agent=task.agent
)
else:
agent_field = AgentCustomField.objects.create(
field=task.custom_field, agent=task.agent
)
# get last line of stdout
value = new_task.stdout.split("\n")[-1].strip()
if task.custom_field.type in ["text", "number", "single", "datetime"]:
agent_field.string_value = value
agent_field.save()
elif task.custom_field.type == "multiple":
agent_field.multiple_value = value.split(",")
agent_field.save()
elif task.custom_field.type == "checkbox":
agent_field.bool_value = bool(value)
agent_field.save()
status = "passing"
else:
status = "failing"
else:
status = "failing" if task.retcode != 0 else "passing"
new_task.status = status
new_task.save()
if status == "passing":
if Alert.objects.filter(assigned_task=new_task, resolved=False).exists():
Alert.handle_alert_resolve(new_task)
else:
Alert.handle_alert_failure(new_task)
new_task = AutomatedTask.objects.get(pk=task.pk)
AuditLog.objects.create(
username=agent.hostname,
agent=agent.hostname,
@@ -203,159 +416,6 @@ class TaskRunner(APIView):
return Response("ok")
class SaltMinion(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
ret = {
"latestVer": settings.LATEST_SALT_VER,
"currentVer": agent.salt_ver,
"salt_id": agent.salt_id,
"downloadURL": agent.winsalt_dl,
}
return Response(ret)
def post(self, request):
# accept the salt key
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
if agent.salt_id != request.data["saltid"]:
return notify_error("Salt keys do not match")
try:
resp = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "wheel",
"fun": "key.accept",
"match": request.data["saltid"],
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=30,
)
except Exception:
return notify_error("No communication between agent and salt-api")
try:
data = resp.json()["return"][0]["data"]
minion = data["return"]["minions"][0]
except Exception:
return notify_error("Key error")
if data["success"] and minion == request.data["saltid"]:
return Response("Salt key was accepted")
else:
return notify_error("Not accepted")
def patch(self, request):
# sync modules
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")
if r == "timeout" or r == "error":
return notify_error("Failed to sync salt modules")
if isinstance(r, list) and any("modules" in i for i in r):
return Response("Successfully synced salt modules")
elif isinstance(r, list) and not r:
return Response("Modules are already in sync")
else:
return notify_error(f"Failed to sync salt modules: {str(r)}")
def put(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent.salt_ver = request.data["ver"]
agent.save(update_fields=["salt_ver"])
return Response("ok")
class WinUpdater(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
agent.delete_superseded_updates()
patches = agent.winupdates.filter(action="approve").exclude(installed=True)
return Response(ApprovedUpdateSerializer(patches, many=True).data)
# agent sends patch results as it's installing them
def patch(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
kb = request.data["kb"]
results = request.data["results"]
update = agent.winupdates.get(kb=kb)
if results == "error" or results == "failed":
update.result = results
update.save(update_fields=["result"])
elif results == "success":
update.result = "success"
update.downloaded = True
update.installed = True
update.date_installed = djangotime.now()
update.save(
update_fields=[
"result",
"downloaded",
"installed",
"date_installed",
]
)
elif results == "alreadyinstalled":
update.result = "success"
update.downloaded = True
update.installed = True
update.save(update_fields=["result", "downloaded", "installed"])
return Response("ok")
# agent calls this after it's finished installing all patches
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
reboot_policy = agent.get_patch_policy().reboot_after_install
reboot = False
if reboot_policy == "always":
reboot = True
if request.data["reboot"]:
if reboot_policy == "required":
reboot = True
elif reboot_policy == "never":
agent.needs_reboot = True
agent.save(update_fields=["needs_reboot"])
if reboot:
r = agent.salt_api_cmd(
timeout=15,
func="system.reboot",
arg=7,
kwargs={"in_seconds": True},
)
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
else:
logger.info(
f"{agent.hostname} is rebooting after updates were installed."
)
else:
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
return Response("ok")
class SysInfo(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
@@ -371,23 +431,8 @@ class SysInfo(APIView):
return Response("ok")
class MeshInfo(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
return Response(agent.mesh_node_id)
def patch(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
agent.mesh_node_id = request.data["nodeidhex"]
agent.save(update_fields=["mesh_node_id"])
return Response("ok")
class MeshExe(APIView):
""" Sends the mesh exe to the installer """
"""Sends the mesh exe to the installer"""
def post(self, request):
exe = "meshagent.exe" if request.data["arch"] == "64" else "meshagent-x86.exe"
@@ -435,10 +480,10 @@ class NewAgent(APIView):
agent.salt_id = f"{agent.hostname}-{agent.pk}"
agent.save(update_fields=["salt_id"])
user = User.objects.create_user(  # type: ignore
username=request.data["agent_id"],
agent=agent,
password=User.objects.make_random_password(60),  # type: ignore
)
token = Token.objects.create(user=user)
@@ -448,9 +493,7 @@ class NewAgent(APIView):
else:
WinUpdatePolicy(agent=agent).save()
# Generate policies for new agent
agent.generate_checks_from_policies()
agent.generate_tasks_from_policies()
reload_nats()
# create agent install audit record
AuditLog.objects.create(
@@ -469,3 +512,98 @@ class NewAgent(APIView):
"token": token.key,
}
)
class Software(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
raw: SoftwareList = request.data["software"]
if not isinstance(raw, list):
return notify_error("err")
sw = filter_software(raw)
if not InstalledSoftware.objects.filter(agent=agent).exists():
InstalledSoftware(agent=agent, software=sw).save()
else:
s = agent.installedsoftware_set.first() # type: ignore
s.software = sw
s.save(update_fields=["software"])
return Response("ok")
class Installer(APIView):
def get(self, request):
# used to check if token is valid. will return 401 if not
return Response("ok")
def post(self, request):
if "version" not in request.data:
return notify_error("Invalid data")
ver = request.data["version"]
if pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER):
return notify_error(
f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM"
)
return Response("ok")
class ChocoResult(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def patch(self, request, pk):
action = get_object_or_404(PendingAction, pk=pk)
results: str = request.data["results"]
software_name = action.details["name"].lower()
success = [
"install",
"of",
software_name,
"was",
"successful",
"installed",
]
duplicate = [software_name, "already", "installed", "--force", "reinstall"]
installed = False
if all(x in results.lower() for x in success):
installed = True
elif all(x in results.lower() for x in duplicate):
installed = True
action.details["output"] = results
action.details["installed"] = installed
action.status = "completed"
action.save(update_fields=["details", "status"])
return Response("ok")
class AgentRecovery(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
recovery = agent.recoveryactions.filter(last_run=None).last() # type: ignore
ret = {"mode": "pass", "shellcmd": ""}
if recovery is None:
return Response(ret)
recovery.last_run = djangotime.now()
recovery.save(update_fields=["last_run"])
ret["mode"] = recovery.mode
if recovery.mode == "command":
ret["shellcmd"] = recovery.command
elif recovery.mode == "rpc":
reload_nats()
return Response(ret)

Some files were not shown because too many files have changed in this diff.