Compare commits

...

345 Commits

Author SHA1 Message Date
wh1te909
01ee524049 Release 0.4.3 2021-01-30 04:45:10 +00:00
wh1te909
af9cb65338 bump version 2021-01-30 04:44:41 +00:00
wh1te909
8aa11c580b move agents monitor task to go 2021-01-30 04:39:15 +00:00
wh1te909
ada627f444 forgot to enable natsapi during install 2021-01-30 04:28:27 +00:00
wh1te909
a7b6d338c3 update reqs 2021-01-30 02:06:56 +00:00
wh1te909
9f00538b97 fix tests 2021-01-29 23:38:59 +00:00
wh1te909
a085015282 increase timeout for security eventlogs 2021-01-29 23:34:16 +00:00
wh1te909
0b9c220fbb remove old task 2021-01-29 20:36:28 +00:00
wh1te909
0e3d04873d move wmi celery task to golang 2021-01-29 20:10:52 +00:00
wh1te909
b7578d939f add test for community script shell type 2021-01-29 09:37:34 +00:00
wh1te909
b5c28de03f Release 0.4.2 2021-01-29 08:23:06 +00:00
wh1te909
e17d25c156 bump versions 2021-01-29 08:12:03 +00:00
wh1te909
c25dc1b99c also override shell during load community scripts 2021-01-29 07:39:08 +00:00
Tragic Bronson
a493a574bd Merge pull request #265 from saulens22/patch-1
Fix "TRMM Defender Exclusions" script shell type
2021-01-28 23:36:03 -08:00
Saulius Kazokas
4284493dce Fix "TRMM Defender Exclusions" script shell type 2021-01-29 07:10:10 +02:00
wh1te909
25059de8e1 fix superseded windows defender updates 2021-01-29 02:37:51 +00:00
wh1te909
1731b05ad0 remove old serializers 2021-01-29 02:25:31 +00:00
wh1te909
e80dc663ac remove unused func 2021-01-29 02:22:06 +00:00
wh1te909
39988a4c2f cleanup an old view 2021-01-29 02:15:27 +00:00
wh1te909
415bff303a add some debug for unsupported agents 2021-01-29 01:22:35 +00:00
wh1te909
a65eb62a54 checkrunner changes wh1te909/rmmagent@10a0935f1b 2021-01-29 00:34:18 +00:00
wh1te909
03b2982128 update build flags 2021-01-28 23:11:32 +00:00
wh1te909
bff0527857 Release 0.4.1 2021-01-27 07:48:14 +00:00
wh1te909
f3b7634254 fix tests 2021-01-27 07:45:00 +00:00
wh1te909
6a9593c0b9 bump versions 2021-01-27 07:35:11 +00:00
wh1te909
edb785b8e5 prepare for agent 1.4.0 2021-01-27 07:11:49 +00:00
wh1te909
26d757b50a checkrunner interval changes wh1te909/rmmagent@7f131d54cf 2021-01-27 06:38:42 +00:00
wh1te909
535079ee87 update natsapi 2021-01-26 20:54:30 +00:00
wh1te909
ac380c29c1 fix last response sorting closes #258 2021-01-26 19:58:08 +00:00
wh1te909
3fd212f26c more optimizations 2021-01-25 21:05:59 +00:00
wh1te909
04a3abc651 fix tests 2021-01-25 20:46:22 +00:00
wh1te909
6caf85ddd1 optimize some queries 2021-01-25 20:27:20 +00:00
wh1te909
16e4071508 use error msg from backend 2021-01-25 19:57:50 +00:00
wh1te909
69e7c4324b start mkdocs 2021-01-25 19:55:48 +00:00
wh1te909
a1c4a8cbe5 fix tab refresh 2021-01-23 06:27:33 +00:00
wh1te909
e37f6cfda7 Release 0.4.0 2021-01-23 03:46:22 +00:00
wh1te909
989c804409 bump version 2021-01-23 03:45:49 +00:00
sadnub
7345bc3c82 fix image build script 2021-01-22 20:04:30 -05:00
sadnub
69bee35700 remove winupdate container from dev 2021-01-22 20:03:30 -05:00
sadnub
598e24df7c remove salt and celery-winupdate containers 2021-01-22 19:57:58 -05:00
sadnub
0ae669201e Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-01-22 19:26:03 -05:00
wh1te909
f52a8a4642 black 2021-01-23 00:02:26 +00:00
wh1te909
9c40b61ef2 fix test 2021-01-22 23:41:10 +00:00
wh1te909
72dabcda83 fix a test 2021-01-22 23:29:18 +00:00
wh1te909
161a06dbcc don't change tab when using site refresh button 2021-01-22 23:27:28 +00:00
wh1te909
8ed3d4e70c update quasar 2021-01-22 23:26:44 +00:00
wh1te909
a4223ccc8a bump agent and mesh vers 2021-01-22 22:56:33 +00:00
wh1te909
ca85923855 add purge 2021-01-22 09:34:08 +00:00
wh1te909
52bfe7c493 update natsapi 2021-01-22 00:41:27 +00:00
wh1te909
4786bd0cbe create meshusername during install 2021-01-22 00:40:09 +00:00
wh1te909
cadab160ff add check to remove salt 2021-01-21 23:58:31 +00:00
wh1te909
6a7f17b2b0 more salt cleanup 2021-01-21 00:00:34 +00:00
wh1te909
4986a4d775 more salt cleanup 2021-01-20 23:22:02 +00:00
wh1te909
903af0c2cf goodbye salt, you've served us well 2021-01-20 22:11:54 +00:00
wh1te909
3282fa803c move to go for chocolatey wh1te909/rmmagent@cebde22fa0 2021-01-19 23:43:37 +00:00
wh1te909
67cc47608d add hosts check to migration doc 2021-01-19 23:25:35 +00:00
wh1te909
0411704b8b update rmmagent and resty 2021-01-19 23:10:50 +00:00
wh1te909
1de85b2c69 more winupdate rework wh1te909/rmmagent@08ec2f9191 2021-01-19 03:14:54 +00:00
wh1te909
33b012f29d typo 2021-01-19 03:11:07 +00:00
wh1te909
1357584df3 start winupdate rework 2021-01-19 00:59:38 +00:00
sadnub
e15809e271 Merge branch 'develop' of https://github.com/sadnub/tacticalrmm into develop 2021-01-18 09:17:17 -05:00
wh1te909
0da1950427 Release 0.3.3 2021-01-18 11:01:25 +00:00
wh1te909
e590b921be fix #252 2021-01-18 11:00:50 +00:00
wh1te909
09462692f5 Release 0.3.2 2021-01-18 10:00:45 +00:00
wh1te909
c1d1b5f762 bump version 2021-01-18 10:00:26 +00:00
wh1te909
6b9c87b858 feat: set agent table tab default #249 2021-01-18 09:57:50 +00:00
wh1te909
485b6eb904 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-01-18 09:32:00 +00:00
wh1te909
057630bdb5 fix agent table sort #250 2021-01-18 09:31:28 +00:00
wh1te909
6b02873b30 fix agent table sort #250 2021-01-18 09:12:01 +00:00
wh1te909
0fa0fc6d6b add json linter to migration docs 2021-01-17 18:09:47 +00:00
wh1te909
339ec07465 Release 0.3.1 2021-01-17 05:48:27 +00:00
wh1te909
cd2e798fea bump versions 2021-01-17 05:43:34 +00:00
wh1te909
d5cadbeae2 split agent update into chunks 2021-01-17 05:42:38 +00:00
wh1te909
8046a3ccae Release 0.3.0 2021-01-17 02:16:06 +00:00
wh1te909
bf91d60b31 natsapi bin 1.0.0 2021-01-17 02:07:53 +00:00
wh1te909
539c047ec8 update go 2021-01-17 01:53:45 +00:00
wh1te909
290c18fa87 bump versions 2021-01-17 01:22:08 +00:00
wh1te909
98c46f5e57 fix domain 2021-01-17 01:21:21 +00:00
wh1te909
f8bd5b5b4e update configs/scripts and add migration docs for 0.3.0 2021-01-17 01:16:28 +00:00
wh1te909
816d32edad black 2021-01-16 23:34:55 +00:00
wh1te909
8453835c05 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-01-16 23:32:54 +00:00
wh1te909
9328c356c8 possible fix for mesh scaling 2021-01-16 23:32:46 +00:00
sadnub
89e3c1fc94 remove my print statements 2021-01-16 17:46:56 -05:00
sadnub
67e54cd15d Remove pending action duplicates and make policy check/task propogation more efficient 2021-01-16 17:46:56 -05:00
sadnub
278ea24786 improve dev env 2021-01-16 17:46:56 -05:00
sadnub
aba1662631 remove my print statements 2021-01-16 17:46:30 -05:00
sadnub
61eeb60c19 Remove pending action duplicates and make policy check/task propogation more efficient 2021-01-16 17:44:27 -05:00
wh1te909
5e9a8f4806 new natsapi binary 2021-01-16 21:55:06 +00:00
wh1te909
4cb274e9bc update to celery 5 2021-01-16 21:52:30 +00:00
wh1te909
8b9b1a6a35 update mesh docker conf 2021-01-16 21:50:29 +00:00
sadnub
2655964113 improve dev env 2021-01-16 11:20:24 -05:00
wh1te909
188bad061b add wmi task 2021-01-16 10:31:00 +00:00
wh1te909
3af4c329aa update reqs 2021-01-16 09:42:03 +00:00
wh1te909
6c13395f7d add debug 2021-01-16 09:41:27 +00:00
wh1te909
77b32ba360 remove import 2021-01-16 09:39:15 +00:00
sadnub
91dba291ac nats-api fixes 2021-01-15 23:41:21 -05:00
sadnub
a6bc293640 Finish up check charts 2021-01-15 22:11:40 -05:00
sadnub
53882d6e5f fix dev port 2021-01-15 21:25:32 -05:00
sadnub
d68adfbf10 docker nats-api rework 2021-01-15 21:11:27 -05:00
sadnub
498a392d7f check graphs wip 2021-01-15 21:10:25 -05:00
sadnub
740f6c05db docker cli additions 2021-01-15 21:10:25 -05:00
wh1te909
d810ce301f update natsapi flags 2021-01-16 00:01:31 +00:00
wh1te909
5ef6a14d24 add nats-api binary 2021-01-15 18:21:25 +00:00
wh1te909
a13f6f1e68 move recovery to natsapi 2021-01-15 10:19:01 +00:00
wh1te909
d2d0f1aaee fix tests 2021-01-15 09:57:46 +00:00
wh1te909
e64c72cc89 #234 sort proc mem using bytes wh1te909/rmmagent@04470dd4ce 2021-01-15 09:44:18 +00:00
wh1te909
9ab915a08b Release 0.2.23 2021-01-14 02:43:56 +00:00
wh1te909
e26fbf0328 bump versions 2021-01-14 02:29:14 +00:00
wh1te909
d9a52c4a2a update reqs 2021-01-14 02:27:40 +00:00
wh1te909
7b2ec90de9 feat: double-click agent action #232 2021-01-14 02:21:08 +00:00
wh1te909
d310bf8bbf add community scripts from dinger #242 2021-01-14 01:17:58 +00:00
wh1te909
2abc6cc939 partially fix sort 2021-01-14 00:01:08 +00:00
sadnub
56d4e694a2 fix annotations and error for the check chart 2021-01-13 18:43:09 -05:00
wh1te909
5f002c9cdc bump mesh 2021-01-13 23:35:14 +00:00
wh1te909
759daf4b4a add wording 2021-01-13 23:35:01 +00:00
wh1te909
3a8d9568e3 split some tasks into chunks to reduce load 2021-01-13 22:26:54 +00:00
wh1te909
ff22a9d94a fix deployments in docker 2021-01-13 22:19:09 +00:00
sadnub
a6e42d5374 fix removing pendingactions that are outstanding 2021-01-13 13:21:09 -05:00
wh1te909
a2f74e0488 add natsapi flags 2021-01-12 21:14:43 +00:00
wh1te909
ee44240569 black 2021-01-12 21:06:44 +00:00
wh1te909
d0828744a2 update nginx conf
(cherry picked from commit bf61e27f8a)
2021-01-12 06:38:52 +00:00
wh1te909
6e2e576b29 start natsapi 2021-01-12 06:32:00 +00:00
wh1te909
bf61e27f8a update nginx conf 2021-01-12 03:02:03 +00:00
Tragic Bronson
c441c30b46 Merge pull request #243 from sadnub/develop
Move Check Runs from Audit to its own table
2021-01-11 00:29:59 -08:00
Tragic Bronson
0e741230ea Merge pull request #242 from dinger1986/develop
Added some scripts checks etc
2021-01-11 00:29:47 -08:00
sadnub
1bfe9ac2db complete other pending actions with same task if task is deleted 2021-01-10 20:19:38 -05:00
sadnub
6812e72348 fix process sorting 2021-01-10 19:35:39 -05:00
sadnub
b6449d2f5b black 2021-01-10 16:33:10 -05:00
sadnub
7e3ea20dce add some tests and bug fixes 2021-01-10 16:27:48 -05:00
sadnub
c9d6fe9dcd allow returning all check data 2021-01-10 15:14:02 -05:00
sadnub
4a649a6b8b black 2021-01-10 14:47:34 -05:00
sadnub
8fef184963 add check history graph for cpu, memory, and diskspace 2021-01-10 14:15:05 -05:00
sadnub
69583ca3c0 docker dev fixes 2021-01-10 13:17:49 -05:00
dinger1986
6038a68e91 Win Defender exclusions for Tactical 2021-01-10 17:56:12 +00:00
dinger1986
fa8bd8db87 Manually reinstall Mesh just incase 2021-01-10 17:54:41 +00:00
dinger1986
18b4f0ed0f Runs DNS check on host as defined 2021-01-10 17:53:53 +00:00
dinger1986
461f9d66c9 Disable Faststartup on Windows 10 2021-01-10 17:51:33 +00:00
dinger1986
2155103c7a Check Win Defender for detections etc 2021-01-10 17:51:06 +00:00
dinger1986
c9a6839c45 Clears Win Defender log files 2021-01-10 17:50:13 +00:00
dinger1986
9fbe331a80 Allows the following Apps access by Win Defender 2021-01-10 17:49:36 +00:00
dinger1986
a56389c4ce Sync time with DC 2021-01-10 17:46:47 +00:00
dinger1986
64656784cb Powershell Speedtest 2021-01-10 17:46:00 +00:00
dinger1986
6eff2c181e Install RDP and change power config 2021-01-10 17:44:23 +00:00
dinger1986
1aa48c6d62 Install OpenSSH on PCs 2021-01-10 17:42:11 +00:00
dinger1986
c7ca1a346d Enable Windows Defender and set preferences 2021-01-10 17:40:06 +00:00
dinger1986
fa0ec7b502 check Duplicati Backup is running properly 2021-01-10 17:38:06 +00:00
dinger1986
768438c136 Checks disks for errors reported in event viewer 2021-01-10 17:36:42 +00:00
dinger1986
9badea0b3c Update DiskStatus.ps1
Checks local disks for errors reported in event viewer within the last 24 hours
2021-01-10 17:35:50 +00:00
dinger1986
43263a1650 Add files via upload 2021-01-10 17:33:48 +00:00
wh1te909
821e02dc75 update mesh docker conf 2021-01-10 00:20:44 +00:00
wh1te909
ed011ecf28 remove old mesh overrides #217 2021-01-10 00:15:11 +00:00
wh1te909
d861de4c2f update community scripts 2021-01-09 22:26:02 +00:00
Tragic Bronson
3a3b2449dc Merge pull request #241 from RVL-Solutions/develop
Create Windows10Upgrade.ps1
2021-01-09 14:12:05 -08:00
Ruben van Leusden
d2614406ca Create Windows10Upgrade.ps1
Shared by Kyt through Discord
2021-01-08 22:20:33 +01:00
Tragic Bronson
0798d098ae Merge pull request #238 from wh1te909/revert-235-master
Revert "Create Windows10Upgrade.ps1"
2021-01-08 10:38:33 -08:00
Tragic Bronson
dab7ddc2bb Revert "Create Windows10Upgrade.ps1" 2021-01-08 10:36:42 -08:00
Tragic Bronson
081a96e281 Merge pull request #235 from RVL-Solutions/master
Create Windows10Upgrade.ps1
2021-01-08 10:36:19 -08:00
wh1te909
a7dd881d79 Release 0.2.22 2021-01-08 18:16:17 +00:00
wh1te909
8134d5e24d remove threading 2021-01-08 18:15:55 +00:00
Ruben van Leusden
ba6756cd45 Create Windows10Upgrade.ps1 2021-01-06 23:19:14 +01:00
Tragic Bronson
5d8fce21ac Merge pull request #230 from wh1te909/dependabot/npm_and_yarn/web/axios-0.21.1
Bump axios from 0.21.0 to 0.21.1 in /web
2021-01-05 13:51:18 -08:00
dependabot[bot]
e7e4a5bcd4 Bump axios from 0.21.0 to 0.21.1 in /web
Bumps [axios](https://github.com/axios/axios) from 0.21.0 to 0.21.1.
- [Release notes](https://github.com/axios/axios/releases)
- [Changelog](https://github.com/axios/axios/blob/v0.21.1/CHANGELOG.md)
- [Commits](https://github.com/axios/axios/compare/v0.21.0...v0.21.1)

Signed-off-by: dependabot[bot] <support@github.com>
2021-01-05 15:54:54 +00:00
wh1te909
55f33357ea Release 0.2.21 2021-01-05 08:55:54 +00:00
wh1te909
90568bba31 bump versions 2021-01-05 08:55:08 +00:00
wh1te909
5d6e2dc2e4 feat: add send script results by email #212 2021-01-05 08:52:17 +00:00
sadnub
6bb33f2559 fix unassigned scripts not show if not categories are present 2021-01-04 20:22:42 -05:00
wh1te909
ced92554ed update community scripts 2021-01-04 22:00:17 +00:00
Tragic Bronson
dff3383158 Merge pull request #228 from azulskyknight/patch-2
Create SetHighPerformancePowerProfile.ps1
2021-01-04 13:42:20 -08:00
Tragic Bronson
bf03c89cb2 Merge pull request #227 from azulskyknight/patch-1
Create ResetHighPerformancePowerProfiletoDefaults.ps1
2021-01-04 13:42:10 -08:00
azulskyknight
9f1484bbef Create SetHighPerformancePowerProfile.ps1
Script sets the High Performance Power profile to the active power profile.
Use this to keep machines from falling asleep.
2021-01-04 13:21:00 -07:00
azulskyknight
3899680e26 Create ResetHighPerformancePowerProfiletoDefaults.ps1
Script resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values.
It also re-indexes the AC and DC power profiles into their default order.
2021-01-04 13:19:03 -07:00
sadnub
6bb2eb25a1 sort script folders alphabetically and fix showing community scripts when no user scripts present 2021-01-03 21:01:50 -05:00
sadnub
f8dfd8edb3 Make pip copy the binaries versus symlink them in dev env 2021-01-03 20:15:40 -05:00
sadnub
042be624a3 Update .dockerignore 2021-01-03 15:16:13 -05:00
sadnub
6bafa4c79a fix mesh init on dev 2021-01-03 15:15:43 -05:00
wh1te909
58b42fac5c Release 0.2.20 2021-01-03 09:13:28 +00:00
wh1te909
3b47b9558a let python calculate default threadpool workers based on cpu count 2021-01-03 09:12:38 +00:00
wh1te909
ccf9636296 Release 0.2.19 2021-01-02 09:34:12 +00:00
wh1te909
96942719f2 bump versions 2021-01-02 09:32:04 +00:00
wh1te909
69cf1c1adc update quasar 2021-01-02 07:38:33 +00:00
wh1te909
d77cba40b8 black 2021-01-02 07:26:34 +00:00
wh1te909
968735b555 fix scroll 2021-01-02 07:21:10 +00:00
wh1te909
ceed9d29eb task changes 2021-01-02 07:20:52 +00:00
sadnub
41329039ee add .env example 2021-01-02 00:09:56 -05:00
sadnub
f68b102ca8 Add Dev Containers 2021-01-02 00:05:54 -05:00
wh1te909
fa36e54298 change agent update 2021-01-02 01:30:51 +00:00
wh1te909
b689f57435 black 2021-01-01 00:51:44 +00:00
sadnub
885fa0ff56 add api tests to core app 2020-12-31 17:18:25 -05:00
Tragic Bronson
303acb72a3 Merge pull request #225 from sadnub/develop
add folder view to script manager
2020-12-31 13:12:33 -08:00
sadnub
b2a46cd0cd add folder view to script manager 2020-12-31 15:46:44 -05:00
wh1te909
5a5ecb3ee3 install curl/wget first fixes #224 2020-12-30 19:04:14 +00:00
wh1te909
60b4ab6a63 fix logging 2020-12-22 05:15:44 +00:00
wh1te909
e4b096a08f fix logging 2020-12-22 05:14:44 +00:00
wh1te909
343f55049b prevent duplicate cpu/mem checks from being created 2020-12-19 20:38:22 +00:00
wh1te909
6b46025261 Release 0.2.18 2020-12-19 08:44:45 +00:00
wh1te909
5ea503f23e bump version 2020-12-19 08:43:47 +00:00
wh1te909
ce95f9ac23 add codestyle to tests 2020-12-19 08:24:47 +00:00
wh1te909
c3fb87501b black 2020-12-19 08:20:12 +00:00
wh1te909
dc6a343612 bump mesh 2020-12-19 07:55:39 +00:00
wh1te909
3a61053957 update reqs 2020-12-19 07:50:32 +00:00
wh1te909
570129e4d4 add debian 10 to readme 2020-12-19 07:50:05 +00:00
wh1te909
3315c7045f if ubuntu, force 20.04 2020-12-19 07:45:21 +00:00
wh1te909
5ae50e242c always run npm install during update 2020-12-18 21:59:23 +00:00
Tragic Bronson
bbcf449719 Merge pull request #214 from mckinnon81/debian
Updated install.sh for Debian
2020-12-18 13:56:14 -08:00
Matthew McKinnon
aab10f7184 Removed certbot test-cert. Not needed 2020-12-18 08:32:40 +10:00
Matthew McKinnon
8d43488cb8 Updated install.sh for Debian
Updated api\tacticalrmm\accounts\views.py valid_window=10
2020-12-18 08:28:01 +10:00
Tragic Bronson
0a9c647e19 Merge pull request #211 from sadnub/develop
Fix default policies
2020-12-16 13:51:37 -08:00
wh1te909
40db5d4aa8 remove debug print 2020-12-16 21:50:43 +00:00
Josh
9254532baa fix applying default policies in certain situations 2020-12-16 20:38:36 +00:00
Josh
7abed47cf0 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2020-12-16 19:08:12 +00:00
Tragic Bronson
5c6ac758f7 Merge pull request #210 from mckinnon81/scripts
Fixed Paths in ClearFirefoxCache.ps1 & ClearGoogleChromeCache.ps1
2020-12-16 09:36:33 -08:00
Matthew McKinnon
007677962c Fixed Paths in ClearFirefoxCache.ps1 & ClearGoogleChromeCache.ps1 2020-12-16 22:32:04 +10:00
wh1te909
9c4aeab64a back to develop 2020-12-16 10:47:05 +00:00
wh1te909
48e6fc0efe test coveralls 2 2020-12-16 10:41:39 +00:00
wh1te909
c8be713d11 test coveralls 2020-12-16 10:38:00 +00:00
wh1te909
ae887c8648 switch to branch head for coveralls 2020-12-16 10:20:50 +00:00
wh1te909
5daac2531b add accounts tests for new settings 2020-12-16 10:09:58 +00:00
wh1te909
68def00327 fix tests 2020-12-16 09:40:36 +00:00
wh1te909
67e7976710 pipelines attempt 2 2020-12-16 09:25:28 +00:00
wh1te909
35747e937e try to get pipelines to fail 2020-12-16 09:10:53 +00:00
wh1te909
fb439787a4 Release 0.2.17 2020-12-16 00:37:59 +00:00
wh1te909
8fa368f473 bump versions 2020-12-16 00:36:43 +00:00
sadnub
c84a9d07b1 tactical-cli for managing docker installations 2020-12-15 13:41:03 -05:00
wh1te909
7fb46cdfc4 add more targeting options to bulk actions 2020-12-15 08:30:55 +00:00
Tragic Bronson
52985e5ddc Merge pull request #203 from wh1te909/dependabot/npm_and_yarn/docs/ini-1.3.8
Bump ini from 1.3.5 to 1.3.8 in /docs
2020-12-15 00:10:01 -08:00
wh1te909
e880935dc3 make script name required 2020-12-15 07:37:37 +00:00
wh1te909
cc22b1bca5 send favorite data when adding new script 2020-12-15 07:37:09 +00:00
wh1te909
49a5128918 remove extra migrations already handled by another func 2020-12-15 05:06:33 +00:00
wh1te909
fedc7dcb44 #204 add optional setting to prevent initial admin user from being modified or deleted 2020-12-14 21:00:25 +00:00
wh1te909
cd32b20215 remove vue tests for now 2020-12-14 20:59:43 +00:00
wh1te909
15cd9832c4 change fav script context menu style 2020-12-14 20:41:07 +00:00
wh1te909
f25d4e4553 add agent recovery periodic task 2020-12-14 19:27:09 +00:00
Tragic Bronson
12d1c82b63 Merge pull request #200 from sadnub/develop
Scripts Manager Rework
2020-12-14 10:35:19 -08:00
wh1te909
aebe855078 add a favorite menu to agent's context menu for easy way to run scripts 2020-12-14 11:28:00 +00:00
wh1te909
3416a71ebd add community scripts to migration 2020-12-14 07:17:51 +00:00
Tragic Bronson
94b3fea528 Create FUNDING.yml 2020-12-13 20:57:05 -08:00
Josh
ad1a9ecca1 fix agent table pending actions filter 2020-12-14 04:39:42 +00:00
Josh
715accfb8a scripts rework 2020-12-14 04:39:02 +00:00
wh1te909
a8e03c6138 Release 0.2.16 2020-12-13 11:46:12 +00:00
wh1te909
f69446b648 agent 1.1.11 wh1te909/rmmagent@f693d15322 2020-12-13 11:45:24 +00:00
dependabot[bot]
eedfbe5846 Bump ini from 1.3.5 to 1.3.8 in /docs
Bumps [ini](https://github.com/isaacs/ini) from 1.3.5 to 1.3.8.
- [Release notes](https://github.com/isaacs/ini/releases)
- [Commits](https://github.com/isaacs/ini/compare/v1.3.5...v1.3.8)

Signed-off-by: dependabot[bot] <support@github.com>
2020-12-13 07:18:22 +00:00
wh1te909
153351cc9f Release 0.2.15 2020-12-12 09:40:08 +00:00
wh1te909
1b1eec40a7 agent check-in and recovery improvements 2020-12-12 09:39:20 +00:00
wh1te909
763877541a Release 0.2.14 2020-12-12 01:59:47 +00:00
wh1te909
1fad7d72a2 fix for special chars in computer hostname closes #201 2020-12-12 01:59:10 +00:00
wh1te909
51ea2ea879 Release 0.2.13 2020-12-11 20:48:11 +00:00
wh1te909
d77a478bf0 agent 1.1.8 2020-12-11 20:47:54 +00:00
wh1te909
e413c0264a Release 0.2.12 2020-12-11 07:28:27 +00:00
wh1te909
f88e7f898c bump versions 2020-12-11 07:27:42 +00:00
wh1te909
d07bd4a6db add optional silent flag to installer 2020-12-11 07:25:42 +00:00
wh1te909
fb34c099d5 Release 0.2.11 2020-12-10 19:13:24 +00:00
wh1te909
1d2ee56a15 bump versions 2020-12-10 19:12:30 +00:00
wh1te909
86665f7f09 change update task for agent 1.1.6 2020-12-10 19:08:29 +00:00
wh1te909
0d2b4af986 Release 0.2.10 2020-12-10 10:34:40 +00:00
wh1te909
dc2b2eeb9f bump versions 2020-12-10 10:33:44 +00:00
wh1te909
e5dbb66d53 cleanup agent update func 2020-12-10 10:31:58 +00:00
wh1te909
3474b1c471 fix failing checks alert 2020-12-10 00:01:54 +00:00
wh1te909
3886de5b7c add postgres vacuum 2020-12-10 00:00:02 +00:00
wh1te909
2b3cec06b3 Release 0.2.9 2020-12-09 05:07:11 +00:00
wh1te909
8536754d14 bump version for new agent 2020-12-09 05:06:19 +00:00
wh1te909
1f36235801 fix wording 2020-12-09 05:04:25 +00:00
wh1te909
a4194b14f9 Release 0.2.8 2020-12-09 00:50:48 +00:00
wh1te909
2dcc629d9d bump versions 2020-12-09 00:31:33 +00:00
wh1te909
98ddadc6bc add sync task 2020-12-08 23:02:05 +00:00
wh1te909
f6e47b7383 remove extra services view 2020-12-08 20:09:09 +00:00
wh1te909
f073ddc906 Release 0.2.7 2020-12-07 09:50:37 +00:00
wh1te909
3e00631925 cleanup older pending action agent updates if one exists with an older agent version 2020-12-07 09:50:15 +00:00
wh1te909
9b7ac58562 Release 0.2.6 2020-12-07 08:56:20 +00:00
wh1te909
f242ddd801 bump versions 2020-12-07 08:55:49 +00:00
wh1te909
c129886fe2 change sleeps 2020-12-07 08:30:21 +00:00
wh1te909
f577e814cf add refresh summary 2020-12-07 08:29:37 +00:00
wh1te909
c860a0cedd update reqs 2020-12-07 00:35:38 +00:00
wh1te909
ae7e28e492 try fixing coveralls branch 2020-12-06 00:43:36 +00:00
wh1te909
90a63234ad add coveralls 2020-12-04 06:40:44 +00:00
wh1te909
14bca52e8f remove dead code, update middleware 2020-12-04 06:25:53 +00:00
wh1te909
2f3c3361cf remove static clients list from audit log 2020-12-04 06:05:25 +00:00
wh1te909
4034134055 add task scheduler expire after wh1te909/rmmagent@fe91e5f110 2020-12-03 22:46:25 +00:00
sadnub
c04f94cb7b fix certificates on docker 2020-12-03 12:29:03 -05:00
sadnub
fd1bbc7925 Update docker-build-push.yml 2020-12-02 07:53:12 -05:00
wh1te909
ff69bed394 Release 0.2.5 2020-12-02 11:06:55 +00:00
wh1te909
d6e8c5146f bump version 2020-12-02 11:06:34 +00:00
wh1te909
9a04cf99d7 fix pending actions ui 2020-12-02 11:05:29 +00:00
wh1te909
86e7c11e71 fix mesh nginx 2020-12-02 10:40:20 +00:00
wh1te909
361cc08faa Release 0.2.4 2020-12-02 05:45:55 +00:00
wh1te909
70dc771052 bump rmm and agent ver 2020-12-02 05:35:13 +00:00
wh1te909
c14873a799 update optional args 2020-12-02 05:33:35 +00:00
wh1te909
bba5abd74b bump script vers 2020-12-02 05:16:16 +00:00
wh1te909
a224e79c1f bump mesh and vue 2020-12-02 04:51:05 +00:00
wh1te909
c305d98186 remove old code 2020-12-02 04:14:35 +00:00
wh1te909
7c5a473e71 add flag to skip salt during agent install 2020-12-02 04:00:36 +00:00
wh1te909
5e0f5d1eed check for old installers 2020-12-02 03:23:16 +00:00
wh1te909
238b269bc4 remove update salt task 2020-12-02 03:22:19 +00:00
Josh
0ad121b9d2 fix tests attempt 2 2020-12-01 16:46:38 +00:00
Josh
7088acd9fd fix tests and remove travis config 2020-12-01 16:41:59 +00:00
Josh
e0a900d4b6 test for rm_orphaned_task in core maintenance 2020-12-01 16:35:34 +00:00
Josh
a0fe2f0c7d fix tests 2020-12-01 16:11:03 +00:00
Josh
d5b9bc2f26 get cert file locations from settings in docker build 2020-12-01 16:10:49 +00:00
Josh
584254e6ca fix/add tests 2020-12-01 15:55:26 +00:00
wh1te909
a2963ed7bb reload table when pending action changed 2020-12-01 07:01:50 +00:00
wh1te909
2a3c2e133d fix wording 2020-12-01 06:43:52 +00:00
wh1te909
3e7dcb2755 don't hide refresh when sw list empty 2020-12-01 06:27:34 +00:00
wh1te909
faeec00b39 remove more tasks now handled by the agent 2020-12-01 06:16:09 +00:00
wh1te909
eeed81392f add rm orphaned tasks to maintenance tab 2020-12-01 05:55:27 +00:00
wh1te909
95dce9e992 check for supported agent 2020-12-01 05:52:32 +00:00
wh1te909
502bd2a191 patch nats 2020-12-01 05:16:47 +00:00
wh1te909
17ac92a9d0 remove dead code 2020-12-01 05:16:37 +00:00
wh1te909
ba028cde0c remove old api app 2020-12-01 05:00:13 +00:00
wh1te909
6e751e7a9b remove bg task that's handled by the agent now 2020-12-01 04:51:51 +00:00
wh1te909
948b56d0e6 add a ghetto check for non standard cert 2020-12-01 04:47:09 +00:00
wh1te909
4bf2dc9ece don't create unnecessary outage records 2020-12-01 04:44:38 +00:00
Josh
125823f8ab add server maintenance to tools menu 2020-12-01 03:44:58 +00:00
Josh
24d33397e9 add virtual scroll to audit log table 2020-12-01 02:17:20 +00:00
Josh
2c553825f4 add server-side pagination for audit logging 2020-12-01 02:01:10 +00:00
wh1te909
198c485e9a reduce threads 2020-11-30 21:51:25 +00:00
wh1te909
0138505507 reduce threads 2020-11-30 21:49:47 +00:00
wh1te909
5d50dcc600 add api endpoint for software 2020-11-30 21:45:12 +00:00
wh1te909
7bdd8c4626 add some type hints 2020-11-30 10:28:25 +00:00
wh1te909
fc82c35f0c finish moving schedtasks to nats 2020-11-30 08:18:47 +00:00
wh1te909
426ebad300 start moving schedtasks to nats wh1te909/rmmagent@0cde11a067 2020-11-29 23:40:29 +00:00
sadnub
1afe61c593 fix docker-compose.yml 2020-11-29 14:24:32 -05:00
wh1te909
c20751829b create migration for schedtask weekdays 2020-11-29 10:37:46 +00:00
Tragic Bronson
a3b8ee8392 Merge pull request #194 from sadnub/develop
Get mesh version for settings.py
2020-11-28 21:02:58 -08:00
Josh
156c0fe7f6 add dockerignore and get MESH_VER from settings.py 2020-11-29 04:47:34 +00:00
wh1te909
216f7a38cf support mesh > 0.6.84 wh1te909/rmmagent@85aab2facf 2020-11-29 04:15:57 +00:00
Tragic Bronson
fd04dc10d4 Merge pull request #193 from sadnub/feature-uichanges
Some fixes
2020-11-28 19:48:41 -08:00
Josh
d39bdce926 add install agent to site context menu 2020-11-29 03:30:31 +00:00
Josh
c6e01245b0 fix disabled prop on edit agent patch policy and agent checks tab 2020-11-29 02:56:35 +00:00
Josh
c168ee7ba4 bump app version and mesh version 2020-11-29 02:44:29 +00:00
Josh
7575253000 regenerate policies and tasks on site/client change on agent 2020-11-29 02:35:30 +00:00
Josh
c28c1efbb1 Add pending actions to agent table and filter 2020-11-29 02:13:50 +00:00
sadnub
e6aa2c3b78 Delete docker-build-publish.yml 2020-11-28 09:47:41 -05:00
sadnub
ab7c481f83 Create docker-build-push.yml 2020-11-28 09:47:27 -05:00
wh1te909
84ad1c352d Release 0.2.3 2020-11-28 06:09:38 +00:00
wh1te909
e9aad39ac9 bump version 2020-11-28 06:09:01 +00:00
wh1te909
c3444a87bc update backup/restore scripts for nats 2020-11-28 06:05:47 +00:00
sadnub
67b224b340 get automated builds working 2020-11-28 00:23:11 -05:00
sadnub
bded14d36b fix action file 2020-11-27 23:12:22 -05:00
sadnub
73fa0b6631 create github action for testing 2020-11-27 23:09:45 -05:00
Josh Krawczyk
2f07337588 fix mesh container and wait for nginx 2020-11-27 21:15:27 -05:00
wh1te909
da163d44e7 fix nats reload for old agents, fix domain parsing for non standard domains 2020-11-27 22:41:32 +00:00
Josh
56fbf8ae0c docker fixes for salt modules and nats config reload 2020-11-27 19:31:33 +00:00
wh1te909
327eb4b39b Release 0.2.2 2020-11-26 07:37:00 +00:00
wh1te909
ae7873a7e3 fix duplicate key error causing UI to freeze 2020-11-26 07:36:26 +00:00
wh1te909
9a5f01813b Release 0.2.1 2020-11-26 06:20:49 +00:00
wh1te909
0605a3b725 fix uninstall for older agents 2020-11-26 06:20:01 +00:00
247 changed files with 55262 additions and 47632 deletions

View File

@@ -0,0 +1,28 @@
COMPOSE_PROJECT_NAME=trmm
IMAGE_REPO=tacticalrmm/
VERSION=latest
# tactical credentials (Used to login to dashboard)
TRMM_USER=tactical
TRMM_PASS=tactical
# dns settings
APP_HOST=rmm.example.com
API_HOST=api.example.com
MESH_HOST=mesh.example.com
# mesh settings
MESH_USER=tactical
MESH_PASS=tactical
MONGODB_USER=mongouser
MONGODB_PASSWORD=mongopass
# database settings
POSTGRES_USER=postgres
POSTGRES_PASS=postgrespass
# DEV SETTINGS
APP_PORT=8000
API_PORT=8080
HTTP_PROTOCOL=https
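These are only development defaults; docker-compose substitutes them into the compose files at startup (compose picks up a .env in the directory it is run from, or one passed explicitly). A minimal sketch of checking the interpolation, assuming docker-compose 1.25+ and that it is run from the .devcontainer directory holding this file:

```bash
# Render the dev compose file with these values substituted, without starting anything.
# --env-file is only needed if the .env is not in the current working directory.
docker-compose -p trmm --env-file .env -f docker-compose.yml config
```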

View File

@@ -0,0 +1,28 @@
FROM python:3.8-slim
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_GO_DIR /usr/local/rmmgo
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
ENV WORKSPACE_DIR /workspace
ENV TACTICAL_USER tactical
ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
EXPOSE 8000
RUN groupadd -g 1000 tactical && \
useradd -u 1000 -g 1000 tactical
# Copy Go Files
COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
# Copy Dev python reqs
COPY ./requirements.txt /
# Copy Docker Entrypoint
COPY ./entrypoint.sh /
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
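The compose files below build this image as api-dev with build context `.` and `dockerfile: ./api.dockerfile`, so compose normally handles the build; building it by hand is just the equivalent docker command. A sketch, assuming it is run from the .devcontainer directory:

```bash
# Build the dev API image the same way the compose services declare it
# (context ".", file "api.dockerfile", image name "api-dev").
docker build -f api.dockerfile -t api-dev .
```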

View File

@@ -0,0 +1,19 @@
version: '3.4'
services:
  api-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
    ports:
      - 8000:8000
      - 5678:5678
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    networks:
      dev:
        aliases:
          - tactical-backend
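This override replaces the API command with debugpy, which listens on port 5678 and, because of --wait-for-client, holds the Django server until a debugger attaches (the "Django: Docker Remote Attach" configuration added to .vscode/launch.json further down does exactly that). Starting the debug stack uses the same invocation as the "docker debug" task in .vscode/tasks.json below:

```bash
# Bring up the dev stack with the debug override; the API container then waits
# on port 5678 for a debugger before Django starts serving.
docker-compose -p trmm \
    -f .devcontainer/docker-compose.yml \
    -f .devcontainer/docker-compose.debug.yml \
    up -d --build
```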

View File

@@ -0,0 +1,209 @@
version: '3.4'
services:
  api-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["tactical-api"]
    environment:
      API_PORT: ${API_PORT}
    ports:
      - "8000:${API_PORT}"
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    networks:
      dev:
        aliases:
          - tactical-backend
  app-dev:
    image: node:12-alpine
    command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
    working_dir: /workspace/web
    volumes:
      - ..:/workspace:cached
    ports:
      - "8080:${APP_PORT}"
    networks:
      dev:
        aliases:
          - tactical-frontend
  # nats
  nats-dev:
    image: ${IMAGE_REPO}tactical-nats:${VERSION}
    restart: always
    environment:
      API_HOST: ${API_HOST}
      API_PORT: ${API_PORT}
      DEV: 1
    ports:
      - "4222:4222"
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    networks:
      dev:
        aliases:
          - ${API_HOST}
          - tactical-nats
  # meshcentral container
  meshcentral-dev:
    image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
    restart: always
    environment:
      MESH_HOST: ${MESH_HOST}
      MESH_USER: ${MESH_USER}
      MESH_PASS: ${MESH_PASS}
      MONGODB_USER: ${MONGODB_USER}
      MONGODB_PASSWORD: ${MONGODB_PASSWORD}
      NGINX_HOST_IP: 172.21.0.20
    networks:
      dev:
        aliases:
          - tactical-meshcentral
          - ${MESH_HOST}
    volumes:
      - tactical-data-dev:/opt/tactical
      - mesh-data-dev:/home/node/app/meshcentral-data
    depends_on:
      - mongodb-dev
  # mongodb container for meshcentral
  mongodb-dev:
    image: mongo:4.4
    restart: always
    environment:
      MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
      MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
      MONGO_INITDB_DATABASE: meshcentral
    networks:
      dev:
        aliases:
          - tactical-mongodb
    volumes:
      - mongo-dev-data:/data/db
  # postgres database for api service
  postgres-dev:
    image: postgres:13-alpine
    restart: always
    environment:
      POSTGRES_DB: tacticalrmm
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASS}
    volumes:
      - postgres-data-dev:/var/lib/postgresql/data
    networks:
      dev:
        aliases:
          - tactical-postgres
  # redis container for celery tasks
  redis-dev:
    restart: always
    image: redis:6.0-alpine
    networks:
      dev:
        aliases:
          - tactical-redis
  init-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    restart: on-failure
    command: ["tactical-init-dev"]
    environment:
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASS: ${POSTGRES_PASS}
      APP_HOST: ${APP_HOST}
      API_HOST: ${API_HOST}
      MESH_HOST: ${MESH_HOST}
      MESH_USER: ${MESH_USER}
      TRMM_USER: ${TRMM_USER}
      TRMM_PASS: ${TRMM_PASS}
      HTTP_PROTOCOL: ${HTTP_PROTOCOL}
      APP_PORT: ${APP_PORT}
    depends_on:
      - postgres-dev
      - meshcentral-dev
    networks:
      - dev
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
  # container for celery worker service
  celery-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["tactical-celery-dev"]
    restart: always
    networks:
      - dev
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    depends_on:
      - postgres-dev
      - redis-dev
  # container for celery beat service
  celerybeat-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["tactical-celerybeat-dev"]
    restart: always
    networks:
      - dev
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    depends_on:
      - postgres-dev
      - redis-dev
  nginx-dev:
    # container for tactical reverse proxy
    image: ${IMAGE_REPO}tactical-nginx:${VERSION}
    restart: always
    environment:
      APP_HOST: ${APP_HOST}
      API_HOST: ${API_HOST}
      MESH_HOST: ${MESH_HOST}
      CERT_PUB_KEY: ${CERT_PUB_KEY}
      CERT_PRIV_KEY: ${CERT_PRIV_KEY}
      APP_PORT: ${APP_PORT}
      API_PORT: ${API_PORT}
    networks:
      dev:
        ipv4_address: 172.21.0.20
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - tactical-data-dev:/opt/tactical
volumes:
  tactical-data-dev:
  postgres-data-dev:
  mongo-dev-data:
  mesh-data-dev:
networks:
  dev:
    driver: bridge
    ipam:
      driver: default
      config:
        - subnet: 172.21.0.0/24
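The init-dev service is a one-shot container: it runs the tactical-init-dev branch of the entrypoint below (migrations, Mesh setup, superuser creation) and then writes the ready file that api-dev, celery-dev and celerybeat-dev wait on. A hedged example of watching that happen, assuming the stack was started with the trmm project name used elsewhere in this changeset:

```bash
# Follow the one-shot init container; the other services stay in their
# check_tactical_ready loop until it writes /opt/tactical/tmp/tactical.ready.
docker-compose -p trmm -f .devcontainer/docker-compose.yml logs -f init-dev
```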

.devcontainer/entrypoint.sh (new file, 169 lines)

@@ -0,0 +1,169 @@
#!/usr/bin/env bash
set -e
: "${TRMM_USER:=tactical}"
: "${TRMM_PASS:=tactical}"
: "${POSTGRES_HOST:=tactical-postgres}"
: "${POSTGRES_PORT:=5432}"
: "${POSTGRES_USER:=tactical}"
: "${POSTGRES_PASS:=tactical}"
: "${POSTGRES_DB:=tacticalrmm}"
: "${MESH_CONTAINER:=tactical-meshcentral}"
: "${MESH_USER:=meshcentral}"
: "${MESH_PASS:=meshcentralpass}"
: "${MESH_HOST:=tactical-meshcentral}"
: "${API_HOST:=tactical-backend}"
: "${APP_HOST:=tactical-frontend}"
: "${REDIS_HOST:=tactical-redis}"
: "${HTTP_PROTOCOL:=http}"
: "${APP_PORT:=8080}"
: "${API_PORT:=8000}"
# Add python venv to path
export PATH="${VIRTUAL_ENV}/bin:$PATH"
function check_tactical_ready {
    sleep 15
    until [ -f "${TACTICAL_READY_FILE}" ]; do
        echo "waiting for init container to finish install or update..."
        sleep 10
    done
}
function django_setup {
    until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
        echo "waiting for postgresql container to be ready..."
        sleep 5
    done
    until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
        echo "waiting for meshcentral container to be ready..."
        sleep 5
    done
    echo "setting up django environment"
    # configure django settings
    MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
    DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
    localvars="$(cat << EOF
SECRET_KEY = '${DJANGO_SEKRET}'
DEBUG = True
DOCKER_BUILD = True
CERT_FILE = '/opt/tactical/certs/fullchain.pem'
KEY_FILE = '/opt/tactical/certs/privkey.pem'
SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
ALLOWED_HOSTS = ['${API_HOST}', '*']
ADMIN_URL = 'admin/'
CORS_ORIGIN_ALLOW_ALL = True
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': '${POSTGRES_DB}',
        'USER': '${POSTGRES_USER}',
        'PASSWORD': '${POSTGRES_PASS}',
        'HOST': '${POSTGRES_HOST}',
        'PORT': '${POSTGRES_PORT}',
    }
}
REST_FRAMEWORK = {
    'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'knox.auth.TokenAuthentication',
    ),
}
if not DEBUG:
    REST_FRAMEWORK.update({
        'DEFAULT_RENDERER_CLASSES': (
            'rest_framework.renderers.JSONRenderer',
        )
    })
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
REDIS_HOST = '${REDIS_HOST}'
EOF
)"
    echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
    # run migrations and init scripts
    python manage.py migrate --no-input
    python manage.py collectstatic --no-input
    python manage.py initial_db_setup
    python manage.py initial_mesh_setup
    python manage.py load_chocos
    python manage.py load_community_scripts
    python manage.py reload_nats
    # create super user
    echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
}
if [ "$1" = 'tactical-init-dev' ]; then
    # make directories if they don't exist
    mkdir -p ${TACTICAL_DIR}/tmp
    test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
    # setup Python virtual env and install dependencies
    test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV}
    pip install --no-cache-dir -r /requirements.txt
    django_setup
    # create .env file for frontend
    webenv="$(cat << EOF
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
APP_URL = https://${APP_HOST}
EOF
)"
    echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
    # chown everything to tactical user
    chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
    chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
    # create install ready file
    su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
fi
if [ "$1" = 'tactical-api' ]; then
    cp ${WORKSPACE_DIR}/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
    chmod +x /usr/local/bin/goversioninfo
    check_tactical_ready
    python manage.py runserver 0.0.0.0:${API_PORT}
fi
if [ "$1" = 'tactical-celery-dev' ]; then
    check_tactical_ready
    env/bin/celery -A tacticalrmm worker -l debug
fi
if [ "$1" = 'tactical-celerybeat-dev' ]; then
    check_tactical_ready
    test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
    env/bin/celery -A tacticalrmm beat -l debug
fi
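The django_setup steps are idempotent (the superuser one-liner only creates the account if TRMM_USER does not already exist), so individual steps can be repeated by hand once the stack is up. A sketch, assuming the trmm project name from the VS Code task and the api-dev service defined above:

```bash
# Re-run individual init steps inside the running API container; all of these
# commands appear in django_setup above and are safe to repeat.
docker-compose -p trmm -f .devcontainer/docker-compose.yml exec api-dev \
    python manage.py migrate --no-input
docker-compose -p trmm -f .devcontainer/docker-compose.yml exec api-dev \
    python manage.py load_community_scripts
```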

View File

@@ -0,0 +1,44 @@
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
amqp==2.6.1
asgiref==3.3.1
asyncio-nats-client==0.11.4
billiard==3.6.3.0
celery==4.4.6
certifi==2020.12.5
cffi==1.14.3
chardet==3.0.4
cryptography==3.2.1
decorator==4.4.2
Django==3.1.4
django-cors-headers==3.5.0
django-rest-knox==4.1.0
djangorestframework==3.12.2
future==0.18.2
idna==2.10
kombu==4.6.11
loguru==0.5.3
msgpack==1.0.0
packaging==20.4
psycopg2-binary==2.8.6
pycparser==2.20
pycryptodome==3.9.9
pyotp==2.4.1
pyparsing==2.4.7
pytz==2020.4
qrcode==6.1
redis==3.5.3
requests==2.25.0
six==1.15.0
sqlparse==0.4.1
twilio==6.49.0
urllib3==1.26.2
validators==0.18.1
vine==1.3.0
websockets==8.1
zipp==3.4.0
black
Werkzeug
django-extensions
coverage
coveralls
model_bakery

.dockerignore (new file, 25 lines)

@@ -0,0 +1,25 @@
**/__pycache__
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
**/env
README.md

.github/FUNDING.yml (vendored, new file, 12 lines)

@@ -0,0 +1,12 @@
# These are supported funding model platforms
github: wh1te909
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

.github/workflows/docker-build-push.yml (vendored, new file, 78 lines)

@@ -0,0 +1,78 @@
name: Publish Tactical Docker Images
on:
  push:
    tags:
      - "v*.*.*"
jobs:
  docker:
    name: Build and Push Docker Images
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2
      - name: Get Github Tag
        id: prep
        run: |
          echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and Push Tactical Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest
      - name: Build and Push Tactical MeshCentral Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-meshcentral/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest
      - name: Build and Push Tactical NATS Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-nats/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
      - name: Build and Push Tactical Frontend Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-frontend/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest
      - name: Build and Push Tactical Nginx Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-nginx/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-nginx:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nginx:latest
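The workflow fires only on tags matching v*.*.*, strips the leading v to derive the image tag, and pushes each image under that version plus latest. A hypothetical trigger, using the 0.4.3 release from the commit log above as the example version:

```bash
# Pushing a version tag is what kicks off the image builds; the workflow turns
# refs/tags/v0.4.3 into the Docker tag 0.4.3 (and also pushes :latest).
git tag v0.4.3
git push origin v0.4.3
```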

.vscode/launch.json (vendored, 14 lines changed)

@@ -14,6 +14,20 @@
"0.0.0.0:8000"
],
"django": true
},
{
"name": "Django: Docker Remote Attach",
"type": "python",
"request": "attach",
"port": 5678,
"host": "localhost",
"preLaunchTask": "docker debug",
"pathMappings": [
{
"localRoot": "${workspaceFolder}/api/tacticalrmm",
"remoteRoot": "/workspace/api/tacticalrmm"
}
]
}
]
}

.vscode/settings.json (vendored, 21 lines changed)

@@ -2,7 +2,7 @@
"python.pythonPath": "api/tacticalrmm/env/bin/python",
"python.languageServer": "Pylance",
"python.analysis.extraPaths": [
"api/tacticalrmm"
"api/tacticalrmm",
],
"python.analysis.typeCheckingMode": "basic",
"python.formatting.provider": "black",
@@ -41,4 +41,23 @@
"**/*.zip": true
},
},
"go.useLanguageServer": true,
"[go]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": false,
},
"editor.snippetSuggestions": "none",
},
"[go.mod]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true,
},
},
"gopls": {
"usePlaceholders": true,
"completeUnimported": true,
"staticcheck": true,
}
}

.vscode/tasks.json (vendored, new file, 23 lines)

@@ -0,0 +1,23 @@
{
    // See https://go.microsoft.com/fwlink/?LinkId=733558
    // for the documentation about the tasks.json format
    "version": "2.0.0",
    "tasks": [
        {
            "label": "docker debug",
            "type": "shell",
            "command": "docker-compose",
            "args": [
                "-p",
                "trmm",
                "-f",
                ".devcontainer/docker-compose.yml",
                "-f",
                ".devcontainer/docker-compose.debug.yml",
                "up",
                "-d",
                "--build"
            ]
        }
    ]
}

View File

@@ -6,7 +6,7 @@
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
# [LIVE DEMO](https://rmm.xlawgaming.com/)
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
@@ -36,7 +36,7 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
## Installation
### Requirements
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
- A domain you own with at least 3 subdomains
- Google Authenticator app (2 factor is NOT optional)
@@ -62,7 +62,6 @@ sudo ufw default allow outgoing
sudo ufw allow ssh
sudo ufw allow http
sudo ufw allow https
sudo ufw allow proto tcp from any to any port 4505,4506
sudo ufw allow proto tcp from any to any port 4222
sudo ufw enable && sudo ufw reload
```

View File

@@ -1,457 +0,0 @@
from __future__ import absolute_import
import psutil
import os
import datetime
import zlib
import json
import base64
import wmi
import win32evtlog
import win32con
import win32evtlogutil
import winerror
from time import sleep
import requests
import subprocess
import random
import platform
ARCH = "64" if platform.machine().endswith("64") else "32"
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
SYS_DRIVE = os.environ["SystemDrive"]
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
def get_services():
    # see https://github.com/wh1te909/tacticalrmm/issues/38
    # for why I am manually implementing the svc.as_dict() method of psutil
    ret = []
    for svc in psutil.win_service_iter():
        i = {}
        try:
            i["display_name"] = svc.display_name()
            i["binpath"] = svc.binpath()
            i["username"] = svc.username()
            i["start_type"] = svc.start_type()
            i["status"] = svc.status()
            i["pid"] = svc.pid()
            i["name"] = svc.name()
            i["description"] = svc.description()
        except Exception:
            continue
        else:
            ret.append(i)
    return ret
def run_python_script(filename, timeout, script_type="userdefined"):
    # no longer used in agent version 0.11.0
    file_path = os.path.join(TEMP_DIR, filename)
    if os.path.exists(file_path):
        try:
            os.remove(file_path)
        except:
            pass
    if script_type == "userdefined":
        __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
    else:
        __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)
    return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
def run_script(filepath, filename, shell, timeout, args=[], bg=False):
    if shell == "powershell" or shell == "cmd":
        if args:
            return __salt__["cmd.script"](
                source=filepath,
                args=" ".join(map(lambda x: f'"{x}"', args)),
                shell=shell,
                timeout=timeout,
                bg=bg,
            )
        else:
            return __salt__["cmd.script"](
                source=filepath, shell=shell, timeout=timeout, bg=bg
            )
    elif shell == "python":
        file_path = os.path.join(TEMP_DIR, filename)
        if os.path.exists(file_path):
            try:
                os.remove(file_path)
            except:
                pass
        __salt__["cp.get_file"](filepath, file_path)
        salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"
        if args:
            a = " ".join(map(lambda x: f'"{x}"', args))
            cmd = f"{PY_BIN} {file_path} {a}"
            return __salt__[salt_cmd](cmd, timeout=timeout)
        else:
            return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)
def uninstall_agent():
    remove_exe = os.path.join(PROGRAM_DIR, "unins000.exe")
    __salt__["cmd.run_bg"]([remove_exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
    return "ok"
def update_salt():
    for p in psutil.process_iter():
        with p.oneshot():
            if p.name() == "tacticalrmm.exe" and "updatesalt" in p.cmdline():
                return "running"
    from subprocess import Popen, PIPE
    CREATE_NEW_PROCESS_GROUP = 0x00000200
    DETACHED_PROCESS = 0x00000008
    cmd = [TAC_RMM, "-m", "updatesalt"]
    p = Popen(
        cmd,
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        close_fds=True,
        creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
    )
    return p.pid
def run_manual_checks():
    __salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
    return "ok"
def install_updates():
    for p in psutil.process_iter():
        with p.oneshot():
            if p.name() == "tacticalrmm.exe" and "winupdater" in p.cmdline():
                return "running"
    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
def _wait_for_service(svc, status, retries=10):
    attempts = 0
    while 1:
        try:
            service = psutil.win_service_get(svc)
        except psutil.NoSuchProcess:
            stat = "fail"
            attempts += 1
            sleep(5)
        else:
            stat = service.status()
            if stat != status:
                attempts += 1
                sleep(5)
            else:
                attempts = 0
        if attempts == 0 or attempts > retries:
            break
    return stat
def agent_update_v2(inno, url):
    # make sure another instance of the update is not running
    # this function spawns 2 instances of itself (because we call it twice with salt run_bg)
    # so if more than 2 running, don't continue as an update is already running
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update_v2" in p.cmdline():
                    count += 1
        except Exception:
            continue
    if count > 2:
        return "already running"
    sleep(random.randint(1, 20))  # don't flood the rmm
    exe = os.path.join(TEMP_DIR, inno)
    if os.path.exists(exe):
        try:
            os.remove(exe)
        except:
            pass
    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"
    if r.status_code != 200:
        return "failed"
    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r
    ret = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)
    tac = _wait_for_service(svc="tacticalagent", status="running")
    if tac != "running":
        subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)
    chk = _wait_for_service(svc="checkrunner", status="running")
    if chk != "running":
        subprocess.run([NSSM, "start", "checkrunner"], timeout=30)
    return "ok"
def do_agent_update_v2(inno, url):
    return __salt__["cmd.run_bg"](
        [
            SALT_CALL,
            "win_agent.agent_update_v2",
            f"inno={inno}",
            f"url={url}",
            "--local",
        ]
    )
def agent_update(version, url):
    # make sure another instance of the update is not running
    # this function spawns 2 instances of itself so if more than 2 running,
    # don't continue as an update is already running
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update" in p.cmdline():
                    count += 1
        except Exception:
            continue
    if count > 2:
        return "already running"
    sleep(random.randint(1, 60))  # don't flood the rmm
    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"
    if r.status_code != 200:
        return "failed"
    exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")
    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r
    services = ("tacticalagent", "checkrunner")
    for svc in services:
        subprocess.run([NSSM, "stop", svc], timeout=120)
    sleep(10)
    r = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
    sleep(30)
    for svc in services:
        subprocess.run([NSSM, "start", svc], timeout=120)
    return "ok"
def do_agent_update(version, url):
    return __salt__["cmd.run_bg"](
        [
            SALT_CALL,
            "win_agent.agent_update",
            f"version={version}",
            f"url={url}",
            "--local",
        ]
    )
class SystemDetail:
    def __init__(self):
        self.c = wmi.WMI()
        self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
        self.comp_sys = self.c.Win32_ComputerSystem()
        self.memory = self.c.Win32_PhysicalMemory()
        self.os = self.c.Win32_OperatingSystem()
        self.base_board = self.c.Win32_BaseBoard()
        self.bios = self.c.Win32_BIOS()
        self.disk = self.c.Win32_DiskDrive()
        self.network_adapter = self.c.Win32_NetworkAdapter()
        self.network_config = self.c.Win32_NetworkAdapterConfiguration()
        self.desktop_monitor = self.c.Win32_DesktopMonitor()
        self.cpu = self.c.Win32_Processor()
        self.usb = self.c.Win32_USBController()
    def get_all(self, obj):
        ret = []
        for i in obj:
            tmp = [
                {j: getattr(i, j)}
                for j in list(i.properties)
                if getattr(i, j) is not None
            ]
            ret.append(tmp)
        return ret
def system_info():
    info = SystemDetail()
    return {
        "comp_sys_prod": info.get_all(info.comp_sys_prod),
        "comp_sys": info.get_all(info.comp_sys),
        "mem": info.get_all(info.memory),
        "os": info.get_all(info.os),
        "base_board": info.get_all(info.base_board),
        "bios": info.get_all(info.bios),
        "disk": info.get_all(info.disk),
        "network_adapter": info.get_all(info.network_adapter),
        "network_config": info.get_all(info.network_config),
        "desktop_monitor": info.get_all(info.desktop_monitor),
        "cpu": info.get_all(info.cpu),
        "usb": info.get_all(info.usb),
    }
def local_sys_info():
    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
def get_procs():
    ret = []
    # setup
    for proc in psutil.process_iter():
        with proc.oneshot():
            proc.cpu_percent(interval=None)
    # need time for psutil to record cpu percent
    sleep(1)
    for c, proc in enumerate(psutil.process_iter(), 1):
        x = {}
        with proc.oneshot():
            if proc.pid == 0 or not proc.name():
                continue
            x["name"] = proc.name()
            x["cpu_percent"] = proc.cpu_percent(interval=None) / psutil.cpu_count()
            x["memory_percent"] = proc.memory_percent()
            x["pid"] = proc.pid
            x["ppid"] = proc.ppid()
            x["status"] = proc.status()
            x["username"] = proc.username()
            x["id"] = c
            ret.append(x)
    return ret
def _compress_json(j):
    return {
        "wineventlog": base64.b64encode(
            zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
        ).decode("ascii", errors="ignore")
    }
def get_eventlog(logtype, last_n_days):
    start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ
    status_dict = {
        win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
        win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
        win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
        win32con.EVENTLOG_WARNING_TYPE: "WARNING",
        win32con.EVENTLOG_ERROR_TYPE: "ERROR",
        0: "INFO",
    }
    computer = "localhost"
    hand = win32evtlog.OpenEventLog(computer, logtype)
    total = win32evtlog.GetNumberOfEventLogRecords(hand)
    log = []
    uid = 0
    done = False
    try:
        while 1:
            events = win32evtlog.ReadEventLog(hand, flags, 0)
            for ev_obj in events:
                uid += 1
                # return once total number of events reach or we'll be stuck in an infinite loop
                if uid >= total:
                    done = True
                    break
                the_time = ev_obj.TimeGenerated.Format()
                time_obj = datetime.datetime.strptime(the_time, "%c")
                if time_obj < start_time:
                    done = True
                    break
                computer = str(ev_obj.ComputerName)
                src = str(ev_obj.SourceName)
                evt_type = str(status_dict[ev_obj.EventType])
                evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
                evt_category = str(ev_obj.EventCategory)
                record = str(ev_obj.RecordNumber)
                msg = (
                    str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
                    .replace("<", "")
                    .replace(">", "")
                )
                event_dict = {
                    "computer": computer,
                    "source": src,
                    "eventType": evt_type,
                    "eventID": evt_id,
                    "eventCategory": evt_category,
                    "message": msg,
                    "time": the_time,
                    "record": record,
                    "uid": uid,
                }
                log.append(event_dict)
            if done:
                break
    except Exception:
        pass
    win32evtlog.CloseEventLog(hand)
    return _compress_json(log)

View File

@@ -20,6 +20,5 @@ omit =
    */urls.py
    */tests.py
    */test.py
    api/*.py
    checks/utils.py

View File

@@ -6,28 +6,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_auto_20200810_0544'),
("accounts", "0002_auto_20200810_0544"),
]
operations = [
migrations.AddField(
model_name='user',
name='created_by',
model_name="user",
name="created_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='created_time',
model_name="user",
name="created_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='user',
name='modified_by',
model_name="user",
name="modified_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='modified_time',
model_name="user",
name="modified_time",
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -6,24 +6,24 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0003_auto_20200922_1344'),
("accounts", "0003_auto_20200922_1344"),
]
operations = [
migrations.RemoveField(
model_name='user',
name='created_by',
model_name="user",
name="created_by",
),
migrations.RemoveField(
model_name='user',
name='created_time',
model_name="user",
name="created_time",
),
migrations.RemoveField(
model_name='user',
name='modified_by',
model_name="user",
name="modified_by",
),
migrations.RemoveField(
model_name='user',
name='modified_time',
model_name="user",
name="modified_time",
),
]

View File

@@ -6,28 +6,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0004_auto_20201002_1257'),
("accounts", "0004_auto_20201002_1257"),
]
operations = [
migrations.AddField(
model_name='user',
name='created_by',
model_name="user",
name="created_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='created_time',
model_name="user",
name="created_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='user',
name='modified_by',
model_name="user",
name="modified_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='user',
name='modified_time',
model_name="user",
name="modified_time",
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0007_update_agent_primary_key'),
("accounts", "0007_update_agent_primary_key"),
]
operations = [
migrations.AddField(
model_name='user',
name='dark_mode',
model_name="user",
name="dark_mode",
field=models.BooleanField(default=True),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2020-12-10 17:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0008_user_dark_mode"),
]
operations = [
migrations.AddField(
model_name="user",
name="show_community_scripts",
field=models.BooleanField(default=True),
),
]

View File

@@ -0,0 +1,26 @@
# Generated by Django 3.1.4 on 2021-01-14 01:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0009_user_show_community_scripts"),
]
operations = [
migrations.AddField(
model_name="user",
name="agent_dblclick_action",
field=models.CharField(
choices=[
("editagent", "Edit Agent"),
("takecontrol", "Take Control"),
("remotebg", "Remote Background"),
],
default="editagent",
max_length=50,
),
),
]

View File

@@ -0,0 +1,26 @@
# Generated by Django 3.1.5 on 2021-01-18 09:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0010_user_agent_dblclick_action"),
]
operations = [
migrations.AddField(
model_name="user",
name="default_agent_tbl_tab",
field=models.CharField(
choices=[
("server", "Servers"),
("workstation", "Workstations"),
("mixed", "Mixed"),
],
default="server",
max_length=50,
),
),
]

View File

@@ -3,11 +3,30 @@ from django.contrib.auth.models import AbstractUser
from logs.models import BaseAuditModel
AGENT_DBLCLICK_CHOICES = [
("editagent", "Edit Agent"),
("takecontrol", "Take Control"),
("remotebg", "Remote Background"),
]
AGENT_TBL_TAB_CHOICES = [
("server", "Servers"),
("workstation", "Workstations"),
("mixed", "Mixed"),
]
class User(AbstractUser, BaseAuditModel):
is_active = models.BooleanField(default=True)
totp_key = models.CharField(max_length=50, null=True, blank=True)
dark_mode = models.BooleanField(default=True)
show_community_scripts = models.BooleanField(default=True)
agent_dblclick_action = models.CharField(
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
)
default_agent_tbl_tab = models.CharField(
max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
)
agent = models.OneToOneField(
"agents.Agent",

View File

@@ -155,6 +155,33 @@ class GetUpdateDeleteUser(TacticalTestCase):
self.check_not_authenticated("put", url)
@override_settings(ROOT_USER="john")
def test_put_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
data = {
"id": self.john.pk,
"username": "john",
"email": "johndoe@xlawgaming.com",
"first_name": "John",
"last_name": "Doe",
}
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
@override_settings(ROOT_USER="john")
def test_put_not_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
data = {
"id": self.john.pk,
"username": "john",
"email": "johndoe@xlawgaming.com",
"first_name": "John",
"last_name": "Doe",
}
self.client.force_authenticate(user=self.alice)
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 400)
def test_delete(self):
url = f"/accounts/{self.john.pk}/users/"
r = self.client.delete(url)
@@ -166,6 +193,19 @@ class GetUpdateDeleteUser(TacticalTestCase):
self.check_not_authenticated("delete", url)
@override_settings(ROOT_USER="john")
def test_delete_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
r = self.client.delete(url)
self.assertEqual(r.status_code, 200)
@override_settings(ROOT_USER="john")
def test_delete_non_root_user(self):
url = f"/accounts/{self.john.pk}/users/"
self.client.force_authenticate(user=self.alice)
r = self.client.delete(url)
self.assertEqual(r.status_code, 400)
class TestUserAction(TacticalTestCase):
def setUp(self):
@@ -184,6 +224,21 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("post", url)
@override_settings(ROOT_USER="john")
def test_post_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
@override_settings(ROOT_USER="john")
def test_post_non_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
self.client.force_authenticate(user=self.alice)
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
def test_put(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk}
@@ -195,12 +250,42 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("put", url)
def test_darkmode(self):
@override_settings(ROOT_USER="john")
def test_put_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk}
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
user = User.objects.get(pk=self.john.pk)
self.assertEqual(user.totp_key, "")
@override_settings(ROOT_USER="john")
def test_put_non_root_user(self):
url = "/accounts/users/reset/"
data = {"id": self.john.pk}
self.client.force_authenticate(user=self.alice)
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 400)
def test_user_ui(self):
url = "/accounts/users/ui/"
data = {"dark_mode": False}
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)
data = {"show_community_scripts": True}
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)
data = {
"userui": True,
"agent_dblclick_action": "editagent",
"default_agent_tbl_tab": "mixed",
}
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("patch", url)

View File

@@ -60,7 +60,7 @@ class LoginView(KnoxLoginView):
if settings.DEBUG and token == "sekret":
valid = True
elif totp.verify(token, valid_window=1):
elif totp.verify(token, valid_window=10):
valid = True
if valid:
@@ -108,6 +108,13 @@ class GetUpdateDeleteUser(APIView):
def put(self, request, pk):
user = get_object_or_404(User, pk=pk)
if (
hasattr(settings, "ROOT_USER")
and request.user != user
and user.username == settings.ROOT_USER
):
return notify_error("The root user cannot be modified from the UI")
serializer = UserSerializer(instance=user, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
@@ -115,7 +122,15 @@ class GetUpdateDeleteUser(APIView):
return Response("ok")
def delete(self, request, pk):
get_object_or_404(User, pk=pk).delete()
user = get_object_or_404(User, pk=pk)
if (
hasattr(settings, "ROOT_USER")
and request.user != user
and user.username == settings.ROOT_USER
):
return notify_error("The root user cannot be deleted from the UI")
user.delete()
return Response("ok")
@@ -124,8 +139,14 @@ class UserActions(APIView):
# reset password
def post(self, request):
user = get_object_or_404(User, pk=request.data["id"])
if (
hasattr(settings, "ROOT_USER")
and request.user != user
and user.username == settings.ROOT_USER
):
return notify_error("The root user cannot be modified from the UI")
user.set_password(request.data["password"])
user.save()
@@ -133,8 +154,14 @@ class UserActions(APIView):
# reset two factor token
def put(self, request):
user = get_object_or_404(User, pk=request.data["id"])
if (
hasattr(settings, "ROOT_USER")
and request.user != user
and user.username == settings.ROOT_USER
):
return notify_error("The root user cannot be modified from the UI")
user.totp_key = ""
user.save()
@@ -161,6 +188,18 @@ class TOTPSetup(APIView):
class UserUI(APIView):
def patch(self, request):
user = request.user
user.dark_mode = request.data["dark_mode"]
user.save(update_fields=["dark_mode"])
return Response("ok")
if "dark_mode" in request.data.keys():
user.dark_mode = request.data["dark_mode"]
user.save(update_fields=["dark_mode"])
if "show_community_scripts" in request.data.keys():
user.show_community_scripts = request.data["show_community_scripts"]
user.save(update_fields=["show_community_scripts"])
if "userui" in request.data.keys():
user.agent_dblclick_action = request.data["agent_dblclick_action"]
user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
return Response("ok")
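Note: the reworked UserUI.patch accepts three payload shapes (dark_mode, show_community_scripts, or userui plus both agent-table preferences), matching the tests above. A quick sketch of calling it; the base URL and token are placeholders:

import requests

BASE = "https://rmm.example.com"
HEADERS = {"Authorization": "Token <knox-token>"}

# toggle a single preference
requests.patch(f"{BASE}/accounts/users/ui/", json={"dark_mode": False}, headers=HEADERS)

# update both agent-table preferences in one call
requests.patch(
    f"{BASE}/accounts/users/ui/",
    json={
        "userui": True,
        "agent_dblclick_action": "takecontrol",
        "default_agent_tbl_tab": "mixed",
    },
    headers=HEADERS,
)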

View File

@@ -26,7 +26,7 @@ def get_wmi_data():
agent = Recipe(
Agent,
hostname="DESKTOP-TEST123",
version="1.1.0",
version="1.3.0",
monitoring_type=cycle(["workstation", "server"]),
salt_id=generate_agent_id("DESKTOP-TEST123"),
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",

View File

@@ -7,14 +7,20 @@ import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('clients', '0006_deployment'),
('agents', '0020_auto_20201025_2129'),
("clients", "0006_deployment"),
("agents", "0020_auto_20201025_2129"),
]
operations = [
migrations.AddField(
model_name='agent',
name='site_link',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'),
model_name="agent",
name="site_link",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="agents",
to="clients.site",
),
),
]

View File

@@ -6,16 +6,16 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0022_update_site_primary_key'),
("agents", "0022_update_site_primary_key"),
]
operations = [
migrations.RemoveField(
model_name='agent',
name='client',
model_name="agent",
name="client",
),
migrations.RemoveField(
model_name='agent',
name='site',
model_name="agent",
name="site",
),
]

View File

@@ -6,13 +6,13 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0023_auto_20201101_2312'),
("agents", "0023_auto_20201101_2312"),
]
operations = [
migrations.RenameField(
model_name='agent',
old_name='site_link',
new_name='site',
model_name="agent",
old_name="site_link",
new_name="site",
),
]

View File

@@ -6,13 +6,22 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0024_auto_20201101_2319'),
("agents", "0024_auto_20201101_2319"),
]
operations = [
migrations.AlterField(
model_name='recoveryaction',
name='mode',
field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC')], default='mesh', max_length=50),
model_name="recoveryaction",
name="mode",
field=models.CharField(
choices=[
("salt", "Salt"),
("mesh", "Mesh"),
("command", "Command"),
("rpc", "Nats RPC"),
],
default="mesh",
max_length=50,
),
),
]

View File

@@ -6,13 +6,23 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0025_auto_20201122_0407'),
("agents", "0025_auto_20201122_0407"),
]
operations = [
migrations.AlterField(
model_name='recoveryaction',
name='mode',
field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC'), ('checkrunner', 'Checkrunner')], default='mesh', max_length=50),
model_name="recoveryaction",
name="mode",
field=models.CharField(
choices=[
("salt", "Salt"),
("mesh", "Mesh"),
("command", "Command"),
("rpc", "Nats RPC"),
("checkrunner", "Checkrunner"),
],
default="mesh",
max_length=50,
),
),
]

View File

@@ -1,5 +1,3 @@
import requests
import datetime as dt
import time
import base64
from Crypto.Cipher import AES
@@ -8,10 +6,9 @@ from Crypto.Hash import SHA3_384
from Crypto.Util.Padding import pad
import validators
import msgpack
import random
import re
import string
from collections import Counter
from typing import List
from loguru import logger
from packaging import version as pyver
from distutils.version import LooseVersion
@@ -89,6 +86,10 @@ class Agent(BaseAuditModel):
def has_nats(self):
return pyver.parse(self.version) >= pyver.parse("1.1.0")
@property
def has_gotasks(self):
return pyver.parse(self.version) >= pyver.parse("1.1.1")
@property
def timezone(self):
# return the default timezone unless the timezone is explicitly set per agent
@@ -116,14 +117,6 @@ class Agent(BaseAuditModel):
return settings.DL_32
return None
@property
def winsalt_dl(self):
if self.arch == "64":
return settings.SALT_64
elif self.arch == "32":
return settings.SALT_32
return None
@property
def win_inno_exe(self):
if self.arch == "64":
@@ -163,13 +156,11 @@ class Agent(BaseAuditModel):
elif i.status == "failing":
failing += 1
has_failing_checks = True if failing > 0 else False
ret = {
"total": total,
"passing": passing,
"failing": failing,
"has_failing_checks": has_failing_checks,
"has_failing_checks": failing > 0,
}
return ret
@@ -383,14 +374,15 @@ class Agent(BaseAuditModel):
return patch_policy
# clear is used to delete managed policy checks from agent
# parent_checks specifies a list of checks to delete from agent with matching parent_check field
def generate_checks_from_policies(self, clear=False):
from automation.models import Policy
def get_approved_update_guids(self) -> List[str]:
return list(
self.winupdates.filter(action="approve", installed=False).values_list(
"guid", flat=True
)
)
# Clear agent checks managed by policy
if clear:
self.agentchecks.filter(managed_by_policy=True).delete()
def generate_checks_from_policies(self):
from automation.models import Policy
# Reset the overriden_by_policy flag on all agent checks
self.agentchecks.update(overriden_by_policy=False)
@@ -398,17 +390,9 @@ class Agent(BaseAuditModel):
# Generate checks based on policies
Policy.generate_policy_checks(self)
# clear is used to delete managed policy tasks from agent
# parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
def generate_tasks_from_policies(self, clear=False):
from autotasks.tasks import delete_win_task_schedule
def generate_tasks_from_policies(self):
from automation.models import Policy
# Clear agent tasks managed by policy
if clear:
for task in self.autotasks.filter(managed_by_policy=True):
delete_win_task_schedule.delay(task.pk)
# Generate tasks based on policies
Policy.generate_policy_tasks(self)
@@ -467,77 +451,6 @@ class Agent(BaseAuditModel):
await nc.flush()
await nc.close()
def salt_api_cmd(self, **kwargs):
# salt should always time out before the requests timeout does
try:
timeout = kwargs["timeout"]
except KeyError:
# default timeout
timeout = 15
salt_timeout = 12
else:
if timeout < 8:
timeout = 8
salt_timeout = 5
else:
salt_timeout = timeout - 3
json = {
"client": "local",
"tgt": self.salt_id,
"fun": kwargs["func"],
"timeout": salt_timeout,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
if "arg" in kwargs:
json.update({"arg": kwargs["arg"]})
if "kwargs" in kwargs:
json.update({"kwarg": kwargs["kwargs"]})
try:
resp = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[json],
timeout=timeout,
)
except Exception:
return "timeout"
try:
ret = resp.json()["return"][0][self.salt_id]
except Exception as e:
logger.error(f"{self.salt_id}: {e}")
return "error"
else:
return ret
def salt_api_async(self, **kwargs):
json = {
"client": "local_async",
"tgt": self.salt_id,
"fun": kwargs["func"],
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
if "arg" in kwargs:
json.update({"arg": kwargs["arg"]})
if "kwargs" in kwargs:
json.update({"kwarg": kwargs["kwargs"]})
try:
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
except Exception:
return "timeout"
return resp
@staticmethod
def serialize(agent):
# serializes the agent and returns json
@@ -545,89 +458,9 @@ class Agent(BaseAuditModel):
ret = AgentEditSerializer(agent).data
del ret["all_timezones"]
del ret["client"]
return ret
@staticmethod
def salt_batch_async(**kwargs):
assert isinstance(kwargs["minions"], list)
json = {
"client": "local_async",
"tgt_type": "list",
"tgt": kwargs["minions"],
"fun": kwargs["func"],
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
if "arg" in kwargs:
json.update({"arg": kwargs["arg"]})
if "kwargs" in kwargs:
json.update({"kwarg": kwargs["kwargs"]})
try:
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
except Exception:
return "timeout"
return resp
def schedule_reboot(self, obj):
start_date = dt.datetime.strftime(obj, "%Y-%m-%d")
start_time = dt.datetime.strftime(obj, "%H:%M")
# let windows task scheduler automatically delete the task after it runs
end_obj = obj + dt.timedelta(minutes=15)
end_date = dt.datetime.strftime(end_obj, "%Y-%m-%d")
end_time = dt.datetime.strftime(end_obj, "%H:%M")
task_name = "TacticalRMM_SchedReboot_" + "".join(
random.choice(string.ascii_letters) for _ in range(10)
)
r = self.salt_api_cmd(
timeout=15,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Windows\\System32\\shutdown.exe"',
'arguments="/r /t 5 /f"',
"trigger_type=Once",
f'start_date="{start_date}"',
f'start_time="{start_time}"',
f'end_date="{end_date}"',
f'end_time="{end_time}"',
"ac_only=False",
"stop_if_on_batteries=False",
"delete_after=Immediately",
],
)
if r == "error" or (isinstance(r, bool) and not r):
return "failed"
elif r == "timeout":
return "timeout"
elif isinstance(r, bool) and r:
from logs.models import PendingAction
details = {
"taskname": task_name,
"time": str(obj),
}
PendingAction(agent=self, action_type="schedreboot", details=details).save()
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
return {"msg": {"time": nice_time, "agent": self.hostname}}
else:
return "failed"
def not_supported(self, version_added):
return pyver.parse(self.version) < pyver.parse(version_added)
def delete_superseded_updates(self):
try:
pks = [] # list of pks to delete
@@ -680,6 +513,13 @@ class Agent(BaseAuditModel):
elif action.details["action"] == "taskdelete":
delete_win_task_schedule.delay(task_id, pending_action=action.id)
# for clearing duplicate pending actions on agent
def remove_matching_pending_task_actions(self, task_id):
# remove any other pending actions on agent with same task_id
for action in self.pendingactions.exclude(status="completed"):
if action.details["task_id"] == task_id:
action.delete()
class AgentOutage(models.Model):
agent = models.ForeignKey(

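Note: the new has_gotasks property follows the same packaging-based version gate as has_nats. A standalone sketch of the comparison (versions here are made up):

from packaging import version as pyver

def has_gotasks(agent_version: str) -> bool:
    # gotasks support landed in agent 1.1.1
    return pyver.parse(agent_version) >= pyver.parse("1.1.1")

assert has_gotasks("1.1.0") is False
assert has_gotasks("1.2.0") is True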
View File

@@ -34,21 +34,44 @@ class AgentSerializer(serializers.ModelSerializer):
]
class AgentOverdueActionSerializer(serializers.ModelSerializer):
class Meta:
model = Agent
fields = ["pk", "overdue_email_alert", "overdue_text_alert"]
class AgentTableSerializer(serializers.ModelSerializer):
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
pending_actions = serializers.SerializerMethodField()
status = serializers.ReadOnlyField()
checks = serializers.ReadOnlyField()
last_seen = serializers.SerializerMethodField()
client_name = serializers.ReadOnlyField(source="client.name")
site_name = serializers.ReadOnlyField(source="site.name")
logged_username = serializers.SerializerMethodField()
italic = serializers.SerializerMethodField()
def get_last_seen(self, obj):
def get_pending_actions(self, obj):
return obj.pendingactions.filter(status="pending").count()
def get_last_seen(self, obj) -> str:
if obj.time_zone is not None:
agent_tz = pytz.timezone(obj.time_zone)
else:
agent_tz = self.context["default_tz"]
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")
return obj.last_seen.astimezone(agent_tz).timestamp()
def get_logged_username(self, obj) -> str:
if obj.logged_in_username == "None" and obj.status == "online":
return obj.last_logged_in_user
elif obj.logged_in_username != "None":
return obj.logged_in_username
else:
return "-"
def get_italic(self, obj) -> bool:
return obj.logged_in_username == "None" and obj.status == "online"
class Meta:
model = Agent
@@ -62,15 +85,16 @@ class AgentTableSerializer(serializers.ModelSerializer):
"description",
"needs_reboot",
"patches_pending",
"pending_actions",
"status",
"overdue_text_alert",
"overdue_email_alert",
"last_seen",
"boot_time",
"checks",
"logged_in_username",
"last_logged_in_user",
"maintenance_mode",
"logged_username",
"italic",
]
depth = 2
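Note: get_last_seen now returns a Unix timestamp rather than a pre-formatted string, leaving formatting to the frontend. For illustration only, converting it back in Python looks like:

import datetime

def fmt_last_seen(ts: float) -> str:
    # render the epoch value in the local timezone
    return datetime.datetime.fromtimestamp(ts).strftime("%m %d %Y %H:%M:%S")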

View File

@@ -2,12 +2,11 @@ import asyncio
from loguru import logger
from time import sleep
import random
import requests
from packaging import version as pyver
from typing import List
from django.conf import settings
from scripts.models import Script
from tacticalrmm.celery import app
from agents.models import Agent, AgentOutage
@@ -16,280 +15,102 @@ from logs.models import PendingAction
logger.configure(**settings.LOG_CONFIG)
OLD_64_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2.exe"
OLD_32_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2-x86.exe"
def agent_update(pk: int) -> str:
agent = Agent.objects.get(pk=pk)
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
return "noarch"
# sqlite was removed in 1.4.0 to get rid of the cgo dependency
# 1.3.0 has a migration func to move from sqlite to the win registry, so force an upgrade to 1.3.0 first if the agent is older
if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
version = settings.LATEST_AGENT_VER
url = agent.winagent_dl
inno = agent.win_inno_exe
else:
version = "1.3.0"
inno = (
"winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
)
url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"
if agent.has_nats:
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
action = agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).last()
if pyver.parse(action.details["version"]) < pyver.parse(version):
action.delete()
else:
return "pending"
PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
details={
"url": url,
"version": version,
"inno": inno,
},
)
else:
nats_data = {
"func": "agentupdate",
"payload": {
"url": url,
"version": version,
"inno": inno,
},
}
asyncio.run(agent.nats_cmd(nats_data, wait=False))
return "created"
else:
logger.warning(
f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to update."
)
return "not supported"
@app.task
def send_agent_update_task(pks, version):
assert isinstance(pks, list)
def send_agent_update_task(pks: List[int], version: str) -> None:
q = Agent.objects.filter(pk__in=pks)
agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)]
agents: List[int] = [
i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)
]
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.salt_id}. Skipping."
)
continue
# golang agent only backwards compatible with py agent 0.11.2
# force an upgrade to the latest python agent if version < 0.11.2
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
inno = (
"winagent-v0.11.2.exe"
if agent.arch == "64"
else "winagent-v0.11.2-x86.exe"
)
else:
url = agent.winagent_dl
inno = agent.win_inno_exe
logger.info(
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
)
if agent.has_nats:
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
continue
PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
details={
"url": agent.winagent_dl,
"version": settings.LATEST_AGENT_VER,
"inno": agent.win_inno_exe,
},
)
# TODO
# Salt is deprecated, remove this once salt is gone
else:
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
sleep(10)
agent_update(pk)
sleep(0.05)
sleep(4)
@app.task
def auto_self_agent_update_task():
def auto_self_agent_update_task() -> None:
core = CoreSettings.objects.first()
if not core.agent_auto_update:
logger.info("Agent auto update is disabled. Skipping.")
return
q = Agent.objects.only("pk", "version")
agents = [
pks: List[int] = [
i.pk
for i in q
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
]
logger.info(f"Updating {len(agents)}")
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.salt_id}. Skipping."
)
continue
# golang agent only backwards compatible with py agent 0.11.2
# force an upgrade to the latest python agent if version < 0.11.2
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
inno = (
"winagent-v0.11.2.exe"
if agent.arch == "64"
else "winagent-v0.11.2-x86.exe"
)
else:
url = agent.winagent_dl
inno = agent.win_inno_exe
logger.info(
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
)
if agent.has_nats:
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
continue
PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
details={
"url": agent.winagent_dl,
"version": settings.LATEST_AGENT_VER,
"inno": agent.win_inno_exe,
},
)
# TODO
# Salt is deprecated, remove this once salt is gone
else:
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
sleep(10)
@app.task
def update_salt_minion_task():
q = Agent.objects.all()
agents = [
i.pk
for i in q
if pyver.parse(i.version) >= pyver.parse("0.11.0")
and pyver.parse(i.salt_ver) < pyver.parse(settings.LATEST_SALT_VER)
]
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_async(func="win_agent.update_salt")
sleep(20)
@app.task
def get_wmi_detail_task(pk):
agent = Agent.objects.get(pk=pk)
if agent.has_nats:
asyncio.run(agent.nats_cmd({"func": "sysinfo"}, wait=False))
else:
agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")
return "ok"
@app.task
def sync_salt_modules_task(pk):
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
# successful sync with new/changed files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
# successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
if r == "timeout" or r == "error":
return f"Unable to sync modules {agent.salt_id}"
return f"Successfully synced salt modules on {agent.hostname}"
@app.task
def batch_sync_modules_task():
# sync modules, split into chunks of 50 agents so as not to overload salt
agents = Agent.objects.all()
online = [i.salt_id for i in agents]
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
for chunk in chunks:
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
sleep(10)
@app.task
def batch_sysinfo_task():
# update system info using WMI
agents = Agent.objects.all()
agents_nats = [agent for agent in agents if agent.has_nats]
minions = [
agent.salt_id
for agent in agents
if not agent.has_nats and pyver.parse(agent.version) >= pyver.parse("0.11.0")
]
if minions:
Agent.salt_batch_async(minions=minions, func="win_agent.local_sys_info")
for agent in agents_nats:
asyncio.run(agent.nats_cmd({"func": "sysinfo"}, wait=False))
@app.task
def uninstall_agent_task(salt_id):
attempts = 0
error = False
while 1:
try:
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "local",
"tgt": salt_id,
"fun": "win_agent.uninstall_agent",
"timeout": 8,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=10,
)
ret = r.json()["return"][0][salt_id]
except Exception:
attempts += 1
else:
if ret != "ok":
attempts += 1
else:
attempts = 0
if attempts >= 10:
error = True
break
elif attempts == 0:
break
if error:
logger.error(f"{salt_id} uninstall failed")
else:
logger.info(f"{salt_id} was successfully uninstalled")
try:
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "wheel",
"fun": "key.delete",
"match": salt_id,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=30,
)
except Exception:
logger.error(f"{salt_id} unable to remove salt-key")
return "ok"
agent_update(pk)
sleep(0.05)
sleep(4)
@app.task
@@ -330,19 +151,104 @@ def agent_recovery_sms_task(pk):
@app.task
def agent_outages_task():
agents = Agent.objects.only("pk")
agents = Agent.objects.only(
"pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert"
)
for agent in agents:
if agent.status == "overdue":
outages = AgentOutage.objects.filter(agent=agent)
if outages and outages.last().is_active:
continue
if agent.overdue_email_alert or agent.overdue_text_alert:
if agent.status == "overdue":
outages = AgentOutage.objects.filter(agent=agent)
if outages and outages.last().is_active:
continue
outage = AgentOutage(agent=agent)
outage.save()
outage = AgentOutage(agent=agent)
outage.save()
if agent.overdue_email_alert and not agent.maintenance_mode:
agent_outage_email_task.delay(pk=outage.pk)
# add a null check history entry to allow gaps in the graph
for check in agent.agentchecks.all():
check.add_check_history(None)
if agent.overdue_text_alert and not agent.maintenance_mode:
agent_outage_sms_task.delay(pk=outage.pk)
if agent.overdue_email_alert and not agent.maintenance_mode:
agent_outage_email_task.delay(pk=outage.pk)
if agent.overdue_text_alert and not agent.maintenance_mode:
agent_outage_sms_task.delay(pk=outage.pk)
@app.task
def handle_agent_recovery_task(pk: int) -> None:
sleep(10)
from agents.models import RecoveryAction
action = RecoveryAction.objects.get(pk=pk)
if action.mode == "command":
data = {"func": "recoverycmd", "recoverycommand": action.command}
else:
data = {"func": "recover", "payload": {"mode": action.mode}}
asyncio.run(action.agent.nats_cmd(data, wait=False))
@app.task
def run_script_email_results_task(
agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str]
):
agent = Agent.objects.get(pk=agentpk)
script = Script.objects.get(pk=scriptpk)
nats_data["func"] = "runscriptfull"
r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
if r == "timeout":
logger.error(f"{agent.hostname} timed out running script.")
return
CORE = CoreSettings.objects.first()
subject = f"{agent.hostname} {script.name} Results"
exec_time = "{:.4f}".format(r["execution_time"])
body = (
subject
+ f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}"
)
import smtplib
from email.message import EmailMessage
msg = EmailMessage()
msg["Subject"] = subject
msg["From"] = CORE.smtp_from_email
if emails:
msg["To"] = ", ".join(emails)
else:
msg["To"] = ", ".join(CORE.email_alert_recipients)
msg.set_content(body)
try:
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
if CORE.smtp_requires_auth:
server.ehlo()
server.starttls()
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
server.send_message(msg)
server.quit()
else:
server.send_message(msg)
server.quit()
except Exception as e:
logger.error(e)
@app.task
def remove_salt_task() -> None:
if hasattr(settings, "KEEP_SALT") and settings.KEEP_SALT:
return
q = Agent.objects.only("pk", "version")
agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
for chunk in chunks:
for agent in chunk:
asyncio.run(agent.nats_cmd({"func": "removesalt"}, wait=False))
sleep(0.1)
sleep(4)
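Note: several of these tasks share the same slicing pattern to batch agents (chunks of 30 or 50) with a short sleep between batches. The generator in isolation:

from typing import Generator, List

def chunk(items: List[int], size: int) -> Generator[List[int], None, None]:
    # yields consecutive slices of at most `size` items
    return (items[i : i + size] for i in range(0, len(items), size))

for batch in chunk(list(range(7)), 3):
    print(batch)  # [0, 1, 2] then [3, 4, 5] then [6]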

View File

@@ -5,23 +5,15 @@ from unittest.mock import patch
from model_bakery import baker
from itertools import cycle
from django.test import TestCase, override_settings
from django.conf import settings
from django.utils import timezone as djangotime
from logs.models import PendingAction
from tacticalrmm.test import TacticalTestCase
from .serializers import AgentSerializer
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent
from .tasks import (
auto_self_agent_update_task,
update_salt_minion_task,
get_wmi_detail_task,
sync_salt_modules_task,
batch_sync_modules_task,
batch_sysinfo_task,
OLD_64_PY_AGENT,
OLD_32_PY_AGENT,
)
from winupdate.models import WinUpdatePolicy
@@ -33,7 +25,7 @@ class TestAgentViews(TacticalTestCase):
client = baker.make("clients.Client", name="Google")
site = baker.make("clients.Site", client=client, name="LA Office")
self.agent = baker.make_recipe(
"agents.online_agent", site=site, version="1.1.0"
"agents.online_agent", site=site, version="1.1.1"
)
baker.make_recipe("winupdate.winupdate_policy", agent=self.agent)
@@ -112,9 +104,8 @@ class TestAgentViews(TacticalTestCase):
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.nats_cmd")
@patch("agents.tasks.uninstall_agent_task.delay")
@patch("agents.views.reload_nats")
def test_uninstall(self, reload_nats, mock_task, nats_cmd):
def test_uninstall(self, reload_nats, nats_cmd):
url = "/agents/uninstall/"
data = {"pk": self.agent.pk}
@@ -123,13 +114,18 @@ class TestAgentViews(TacticalTestCase):
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
reload_nats.assert_called_once()
mock_task.assert_called_with(self.agent.salt_id)
self.check_not_authenticated("delete", url)
@patch("agents.models.Agent.nats_cmd")
def test_get_processes(self, mock_ret):
url = f"/agents/{self.agent.pk}/getprocs/"
agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
url_old = f"/agents/{agent_old.pk}/getprocs/"
r = self.client.get(url_old)
self.assertEqual(r.status_code, 400)
agent = baker.make_recipe("agents.online_agent", version="1.2.0")
url = f"/agents/{agent.pk}/getprocs/"
with open(
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json")
@@ -139,9 +135,7 @@ class TestAgentViews(TacticalTestCase):
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
assert any(i["name"] == "Registry" for i in mock_ret.return_value)
assert any(
i["memory_percent"] == 0.004843281375620747 for i in mock_ret.return_value
)
assert any(i["membytes"] == 434655234324 for i in mock_ret.return_value)
mock_ret.return_value = "timeout"
r = self.client.get(url)
@@ -168,28 +162,54 @@ class TestAgentViews(TacticalTestCase):
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.nats_cmd")
def test_get_event_log(self, mock_ret):
url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
def test_get_event_log(self, nats_cmd):
url = f"/agents/{self.agent.pk}/geteventlog/Application/22/"
with open(
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
) as f:
mock_ret.return_value = json.load(f)
nats_cmd.return_value = json.load(f)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with(
{
"func": "eventlog",
"timeout": 30,
"payload": {
"logname": "Application",
"days": str(22),
},
},
timeout=32,
)
mock_ret.return_value = "timeout"
url = f"/agents/{self.agent.pk}/geteventlog/Security/6/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with(
{
"func": "eventlog",
"timeout": 180,
"payload": {
"logname": "Security",
"days": str(6),
},
},
timeout=182,
)
nats_cmd.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.nats_cmd")
def test_power_action(self, nats_cmd):
url = f"/agents/poweraction/"
def test_reboot_now(self, nats_cmd):
url = f"/agents/reboot/"
data = {"pk": self.agent.pk, "action": "rebootnow"}
data = {"pk": self.agent.pk}
nats_cmd.return_value = "ok"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
@@ -222,30 +242,37 @@ class TestAgentViews(TacticalTestCase):
self.check_not_authenticated("post", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_reboot_later(self, mock_ret):
url = f"/agents/rebootlater/"
@patch("agents.models.Agent.nats_cmd")
def test_reboot_later(self, nats_cmd):
url = f"/agents/reboot/"
data = {
"pk": self.agent.pk,
"datetime": "2025-08-29 18:41",
}
mock_ret.return_value = True
r = self.client.post(url, data, format="json")
nats_cmd.return_value = "ok"
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
self.assertEqual(r.data["agent"], self.agent.hostname)
mock_ret.return_value = "failed"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "schedreboot",
"trigger": "once",
"name": r.data["task_name"],
"year": 2025,
"month": "August",
"day": 29,
"hour": 18,
"min": 41,
},
}
nats_cmd.assert_called_with(nats_data, timeout=10)
mock_ret.return_value = "timeout"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
mock_ret.return_value = False
nats_cmd.return_value = "error creating task"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
@@ -253,12 +280,12 @@ class TestAgentViews(TacticalTestCase):
"pk": self.agent.pk,
"datetime": "rm -rf /",
}
r = self.client.post(url, data_invalid, format="json")
r = self.client.patch(url, data_invalid, format="json")
self.assertEqual(r.status_code, 400)
self.assertEqual(r.data, "Invalid date")
self.check_not_authenticated("post", url)
self.check_not_authenticated("patch", url)
@patch("os.path.exists")
@patch("subprocess.run")
@@ -326,7 +353,7 @@ class TestAgentViews(TacticalTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
data["mode"] = "salt"
data["mode"] = "mesh"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
self.assertIn("pending", r.json())
@@ -346,7 +373,7 @@ class TestAgentViews(TacticalTestCase):
self.agent.version = "0.9.4"
self.agent.save(update_fields=["version"])
data["mode"] = "salt"
data["mode"] = "mesh"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
self.assertIn("0.9.5", r.json())
@@ -428,7 +455,14 @@ class TestAgentViews(TacticalTestCase):
self.assertIn("&viewmode=13", r.data["file"])
self.assertIn("&viewmode=12", r.data["terminal"])
self.assertIn("&viewmode=11", r.data["control"])
self.assertIn("mstsc.html?login=", r.data["webrdp"])
self.assertIn("&gotonode=", r.data["file"])
self.assertIn("&gotonode=", r.data["terminal"])
self.assertIn("&gotonode=", r.data["control"])
self.assertIn("?login=", r.data["file"])
self.assertIn("?login=", r.data["terminal"])
self.assertIn("?login=", r.data["control"])
self.assertEqual(self.agent.hostname, r.data["hostname"])
self.assertEqual(self.agent.client.name, r.data["client"])
@@ -471,42 +505,20 @@ class TestAgentViews(TacticalTestCase):
def test_overdue_action(self):
url = "/agents/overdueaction/"
payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"}
payload = {"pk": self.agent.pk, "overdue_email_alert": True}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
agent = Agent.objects.get(pk=self.agent.pk)
self.assertTrue(agent.overdue_email_alert)
self.assertEqual(self.agent.hostname, r.data)
payload.update({"alertType": "email", "action": "disabled"})
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
agent = Agent.objects.get(pk=self.agent.pk)
self.assertFalse(agent.overdue_email_alert)
self.assertEqual(self.agent.hostname, r.data)
payload.update({"alertType": "text", "action": "enabled"})
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
agent = Agent.objects.get(pk=self.agent.pk)
self.assertTrue(agent.overdue_text_alert)
self.assertEqual(self.agent.hostname, r.data)
payload.update({"alertType": "text", "action": "disabled"})
payload = {"pk": self.agent.pk, "overdue_text_alert": False}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
agent = Agent.objects.get(pk=self.agent.pk)
self.assertFalse(agent.overdue_text_alert)
self.assertEqual(self.agent.hostname, r.data)
payload.update({"alertType": "email", "action": "523423"})
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
payload.update({"alertType": "text", "action": "asdasd3434asdasd"})
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("post", url)
def test_list_agents_no_detail(self):
@@ -527,7 +539,7 @@ class TestAgentViews(TacticalTestCase):
self.check_not_authenticated("get", url)
@patch("winupdate.tasks.bulk_check_for_updates_task.delay")
""" @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
@patch("scripts.tasks.handle_bulk_script_task.delay")
@patch("scripts.tasks.handle_bulk_command_task.delay")
@patch("agents.models.Agent.salt_batch_async")
@@ -538,6 +550,7 @@ class TestAgentViews(TacticalTestCase):
payload = {
"mode": "command",
"monType": "all",
"target": "agents",
"client": None,
"site": None,
@@ -555,6 +568,7 @@ class TestAgentViews(TacticalTestCase):
payload = {
"mode": "command",
"monType": "servers",
"target": "agents",
"client": None,
"site": None,
@@ -569,12 +583,11 @@ class TestAgentViews(TacticalTestCase):
payload = {
"mode": "command",
"monType": "workstations",
"target": "client",
"client": self.agent.client.id,
"site": None,
"agentPKs": [
self.agent.pk,
],
"agentPKs": [],
"cmd": "gpupdate /force",
"timeout": 300,
"shell": "cmd",
@@ -586,6 +599,7 @@ class TestAgentViews(TacticalTestCase):
payload = {
"mode": "command",
"monType": "all",
"target": "client",
"client": self.agent.client.id,
"site": self.agent.site.id,
@@ -603,6 +617,7 @@ class TestAgentViews(TacticalTestCase):
payload = {
"mode": "scan",
"monType": "all",
"target": "agents",
"client": None,
"site": None,
@@ -616,6 +631,7 @@ class TestAgentViews(TacticalTestCase):
payload = {
"mode": "install",
"monType": "all",
"target": "client",
"client": self.agent.client.id,
"site": None,
@@ -637,7 +653,7 @@ class TestAgentViews(TacticalTestCase):
# TODO mock the script
self.check_not_authenticated("post", url)
self.check_not_authenticated("post", url) """
@patch("agents.models.Agent.nats_cmd")
def test_recover_mesh(self, nats_cmd):
@@ -740,131 +756,82 @@ class TestAgentTasks(TacticalTestCase):
self.setup_coresettings()
@patch("agents.models.Agent.nats_cmd")
@patch("agents.models.Agent.salt_api_async", return_value=None)
def test_get_wmi_detail_task(self, salt_api_async, nats_cmd):
self.agent_salt = baker.make_recipe("agents.agent", version="1.0.2")
ret = get_wmi_detail_task.s(self.agent_salt.pk).apply()
salt_api_async.assert_called_with(timeout=30, func="win_agent.local_sys_info")
self.assertEqual(ret.status, "SUCCESS")
def test_agent_update(self, nats_cmd):
from agents.tasks import agent_update
self.agent_nats = baker.make_recipe("agents.agent", version="1.1.0")
ret = get_wmi_detail_task.s(self.agent_nats.pk).apply()
nats_cmd.assert_called_with({"func": "sysinfo"}, wait=False)
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.salt_api_cmd")
def test_sync_salt_modules_task(self, salt_api_cmd):
self.agent = baker.make_recipe("agents.agent")
salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]}
ret = sync_salt_modules_task.s(self.agent.pk).apply()
salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules")
agent_noarch = baker.make_recipe(
"agents.agent",
operating_system="Error getting OS",
version="1.1.11",
)
r = agent_update(agent_noarch.pk)
self.assertEqual(r, "noarch")
self.assertEqual(
ret.result, f"Successfully synced salt modules on {self.agent.hostname}"
)
self.assertEqual(ret.status, "SUCCESS")
salt_api_cmd.return_value = "timeout"
ret = sync_salt_modules_task.s(self.agent.pk).apply()
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
salt_api_cmd.return_value = "error"
ret = sync_salt_modules_task.s(self.agent.pk).apply()
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
@patch("agents.models.Agent.salt_batch_async", return_value=None)
@patch("agents.tasks.sleep", return_value=None)
def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
# chunks of 50, should run 4 times
baker.make_recipe(
"agents.online_agent", last_seen=djangotime.now(), _quantity=60
)
baker.make_recipe(
"agents.overdue_agent",
last_seen=djangotime.now() - djangotime.timedelta(minutes=9),
_quantity=115,
)
ret = batch_sync_modules_task.s().apply()
self.assertEqual(salt_batch_async.call_count, 4)
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.nats_cmd")
@patch("agents.models.Agent.salt_batch_async", return_value=None)
@patch("agents.tasks.sleep", return_value=None)
def test_batch_sysinfo_task(self, mock_sleep, salt_batch_async, nats_cmd):
self.agents_nats = baker.make_recipe(
"agents.agent", version="1.1.0", _quantity=20
)
# test nats
ret = batch_sysinfo_task.s().apply()
self.assertEqual(nats_cmd.call_count, 20)
nats_cmd.assert_called_with({"func": "sysinfo"}, wait=False)
self.assertEqual(ret.status, "SUCCESS")
self.agents_salt = baker.make_recipe(
"agents.agent", version="1.0.2", _quantity=70
PendingAction.objects.filter(
agent=agent_noarch, action_type="agentupdate"
).count(),
0,
)
minions = [i.salt_id for i in self.agents_salt]
ret = batch_sysinfo_task.s().apply()
self.assertEqual(salt_batch_async.call_count, 1)
salt_batch_async.assert_called_with(
minions=minions, func="win_agent.local_sys_info"
)
self.assertEqual(ret.status, "SUCCESS")
salt_batch_async.reset_mock()
[i.delete() for i in self.agents_salt]
# test old agents, should not run
self.agents_old = baker.make_recipe(
"agents.agent", version="0.10.2", _quantity=70
)
ret = batch_sysinfo_task.s().apply()
salt_batch_async.assert_not_called()
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.salt_api_async", return_value=None)
@patch("agents.tasks.sleep", return_value=None)
def test_update_salt_minion_task(self, mock_sleep, salt_api_async):
# test agents that need salt update
self.agents = baker.make_recipe(
agent64_111 = baker.make_recipe(
"agents.agent",
version=settings.LATEST_AGENT_VER,
salt_ver="1.0.3",
_quantity=53,
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.1.11",
)
ret = update_salt_minion_task.s().apply()
self.assertEqual(salt_api_async.call_count, 53)
self.assertEqual(ret.status, "SUCCESS")
[i.delete() for i in self.agents]
salt_api_async.reset_mock()
# test agents that need salt update but agent version too low
self.agents = baker.make_recipe(
r = agent_update(agent64_111.pk)
self.assertEqual(r, "created")
action = PendingAction.objects.get(agent__pk=agent64_111.pk)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending")
self.assertEqual(
action.details["url"],
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
)
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
self.assertEqual(action.details["version"], "1.3.0")
agent_64_130 = baker.make_recipe(
"agents.agent",
version="0.10.2",
salt_ver="1.0.3",
_quantity=53,
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.3.0",
)
nats_cmd.return_value = "ok"
r = agent_update(agent_64_130.pk)
self.assertEqual(r, "created")
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": settings.DL_64,
"version": settings.LATEST_AGENT_VER,
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
},
},
wait=False,
)
ret = update_salt_minion_task.s().apply()
self.assertEqual(ret.status, "SUCCESS")
salt_api_async.assert_not_called()
[i.delete() for i in self.agents]
salt_api_async.reset_mock()
# test agents already on latest salt ver
self.agents = baker.make_recipe(
agent64_old = baker.make_recipe(
"agents.agent",
version=settings.LATEST_AGENT_VER,
salt_ver=settings.LATEST_SALT_VER,
_quantity=53,
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.2.1",
)
nats_cmd.return_value = "ok"
r = agent_update(agent64_old.pk)
self.assertEqual(r, "created")
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
"version": "1.3.0",
"inno": "winagent-v1.3.0.exe",
},
},
wait=False,
)
ret = update_salt_minion_task.s().apply()
self.assertEqual(ret.status, "SUCCESS")
salt_api_async.assert_not_called()
@patch("agents.models.Agent.salt_api_async")
""" @patch("agents.models.Agent.salt_api_async")
@patch("agents.tasks.sleep", return_value=None)
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
# test 64bit golang agent
@@ -967,4 +934,4 @@ class TestAgentTasks(TacticalTestCase):
"url": OLD_32_PY_AGENT,
},
)
self.assertEqual(ret.status, "SUCCESS")
self.assertEqual(ret.status, "SUCCESS") """

View File

@@ -12,7 +12,6 @@ urlpatterns = [
path("<pk>/agentdetail/", views.agent_detail),
path("<int:pk>/meshcentral/", views.meshcentral),
path("<str:arch>/getmeshexe/", views.get_mesh_exe),
path("poweraction/", views.power_action),
path("uninstall/", views.uninstall),
path("editagent/", views.edit_agent),
path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
@@ -20,7 +19,7 @@ urlpatterns = [
path("updateagents/", views.update_agents),
path("<pk>/getprocs/", views.get_processes),
path("<pk>/<pid>/killproc/", views.kill_proc),
path("rebootlater/", views.reboot_later),
path("reboot/", views.Reboot.as_view()),
path("installagent/", views.install_agent),
path("<int:pk>/ping/", views.ping),
path("recover/", views.recover),
@@ -31,4 +30,5 @@ urlpatterns = [
path("bulk/", views.bulk),
path("agent_counts/", views.agent_counts),
path("maintenance/", views.agent_maintenance),
path("<int:pk>/wmi/", views.WMI.as_view()),
]
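Note: poweraction/ and rebootlater/ are consolidated into the single reboot/ route above, with POST rebooting immediately and PATCH scheduling one (agent 1.1.1+). A usage sketch; URL, token and pk are placeholders:

import requests

BASE = "https://rmm.example.com"
HEADERS = {"Authorization": "Token <knox-token>"}

# reboot now
requests.post(f"{BASE}/agents/reboot/", json={"pk": 42}, headers=HEADERS)

# schedule a reboot for later
requests.patch(
    f"{BASE}/agents/reboot/",
    json={"pk": 42, "datetime": "2025-08-29 18:41"},
    headers=HEADERS,
)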

View File

@@ -3,8 +3,11 @@ from loguru import logger
import os
import subprocess
import pytz
import random
import string
import datetime as dt
from packaging import version as pyver
from typing import List
from django.conf import settings
from django.shortcuts import get_object_or_404
@@ -18,7 +21,7 @@ from rest_framework import status, generics
from .models import Agent, AgentOutage, RecoveryAction, Note
from core.models import CoreSettings
from scripts.models import Script
from logs.models import AuditLog
from logs.models import AuditLog, PendingAction
from .serializers import (
AgentSerializer,
@@ -27,11 +30,15 @@ from .serializers import (
AgentEditSerializer,
NoteSerializer,
NotesSerializer,
AgentOverdueActionSerializer,
)
from winupdate.serializers import WinUpdatePolicySerializer
from .tasks import uninstall_agent_task, send_agent_update_task
from winupdate.tasks import bulk_check_for_updates_task
from .tasks import (
send_agent_update_task,
run_script_email_results_task,
)
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from tacticalrmm.utils import notify_error, reload_nats
@@ -61,38 +68,32 @@ def update_agents(request):
@api_view()
def ping(request, pk):
agent = get_object_or_404(Agent, pk=pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=10))
status = "offline"
if agent.has_nats:
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
if r == "pong":
status = "online"
if r == "timeout" or r == "natsdown":
return Response({"name": agent.hostname, "status": "offline"})
elif r == "pong":
return Response({"name": agent.hostname, "status": "online"})
return Response({"name": agent.hostname, "status": "offline"})
return Response({"name": agent.hostname, "status": status})
@api_view(["DELETE"])
def uninstall(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
if agent.has_nats:
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
salt_id = agent.salt_id
name = agent.hostname
agent.delete()
reload_nats()
uninstall_agent_task.delay(salt_id)
return Response(f"{name} will now be uninstalled.")
@api_view(["PATCH"])
def edit_agent(request):
agent = get_object_or_404(Agent, pk=request.data["id"])
old_site = agent.site.pk
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
a_serializer.is_valid(raise_exception=True)
a_serializer.save()
@@ -104,6 +105,11 @@ def edit_agent(request):
p_serializer.is_valid(raise_exception=True)
p_serializer.save()
# if the site changed, regenerate policy checks and tasks for the agent
if old_site != request.data["site"]:
agent.generate_checks_from_policies()
agent.generate_tasks_from_policies()
return Response("ok")
@@ -119,16 +125,9 @@ def meshcentral(request, pk):
if token == "err":
return notify_error("Invalid mesh token")
control = (
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=11&hide=31"
)
terminal = (
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=12&hide=31"
)
file = (
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=13&hide=31"
)
webrdp = f"{core.mesh_site}/mstsc.html?login={token}&node={agent.mesh_node_id}"
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
@@ -137,7 +136,6 @@ def meshcentral(request, pk):
"control": control,
"terminal": terminal,
"file": file,
"webrdp": webrdp,
"status": agent.status,
"client": agent.client.name,
"site": agent.site.name,
@@ -154,12 +152,12 @@ def agent_detail(request, pk):
@api_view()
def get_processes(request, pk):
agent = get_object_or_404(Agent, pk=pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
if pyver.parse(agent.version) < pyver.parse("1.2.0"):
return notify_error("Requires agent version 1.2.0 or greater")
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
if r == "timeout":
return notify_error("Unable to contact the agent")
return Response(r)
@@ -186,34 +184,22 @@ def get_event_log(request, pk, logtype, days):
agent = get_object_or_404(Agent, pk=pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
timeout = 180 if logtype == "Security" else 30
data = {
"func": "eventlog",
"timeout": 30,
"timeout": timeout,
"payload": {
"logname": logtype,
"days": str(days),
},
}
r = asyncio.run(agent.nats_cmd(data, timeout=32))
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
if r == "timeout":
return notify_error("Unable to contact the agent")
return Response(r)
@api_view(["POST"])
def power_action(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
if request.data["action"] == "rebootnow":
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
if r != "ok":
return notify_error("Unable to contact the agent")
return Response("ok")
@api_view(["POST"])
def send_raw_cmd(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
@@ -349,47 +335,72 @@ def by_site(request, sitepk):
@api_view(["POST"])
def overdue_action(request):
pk = request.data["pk"]
alert_type = request.data["alertType"]
action = request.data["action"]
agent = get_object_or_404(Agent, pk=pk)
if alert_type == "email" and action == "enabled":
agent.overdue_email_alert = True
agent.save(update_fields=["overdue_email_alert"])
elif alert_type == "email" and action == "disabled":
agent.overdue_email_alert = False
agent.save(update_fields=["overdue_email_alert"])
elif alert_type == "text" and action == "enabled":
agent.overdue_text_alert = True
agent.save(update_fields=["overdue_text_alert"])
elif alert_type == "text" and action == "disabled":
agent.overdue_text_alert = False
agent.save(update_fields=["overdue_text_alert"])
else:
return Response(
{"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
)
agent = get_object_or_404(Agent, pk=request.data["pk"])
serializer = AgentOverdueActionSerializer(
instance=agent, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(agent.hostname)
@api_view(["POST"])
def reboot_later(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
date_time = request.data["datetime"]
class Reboot(APIView):
# reboot now
def post(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
try:
obj = dt.datetime.strptime(date_time, "%Y-%m-%d %H:%M")
except Exception:
return notify_error("Invalid date")
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
if r != "ok":
return notify_error("Unable to contact the agent")
r = agent.schedule_reboot(obj)
return Response("ok")
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "failed":
return notify_error("Something went wrong")
# reboot later
def patch(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_gotasks:
return notify_error("Requires agent version 1.1.1 or greater")
return Response(r["msg"])
try:
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
except Exception:
return notify_error("Invalid date")
task_name = "TacticalRMM_SchedReboot_" + "".join(
random.choice(string.ascii_letters) for _ in range(10)
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "schedreboot",
"trigger": "once",
"name": task_name,
"year": int(dt.datetime.strftime(obj, "%Y")),
"month": dt.datetime.strftime(obj, "%B"),
"day": int(dt.datetime.strftime(obj, "%d")),
"hour": int(dt.datetime.strftime(obj, "%H")),
"min": int(dt.datetime.strftime(obj, "%M")),
},
}
if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
nats_data["schedtaskpayload"]["deleteafter"] = True
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if r != "ok":
return notify_error(r)
details = {"taskname": task_name, "time": str(obj)}
PendingAction.objects.create(
agent=agent, action_type="schedreboot", details=details
)
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
return Response(
{"time": nice_time, "agent": agent.hostname, "task_name": task_name}
)
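A quick consumer-side sketch of the consolidated Reboot view above; the route and auth header are placeholders, since the urls module is not part of this hunk. POST reboots immediately, PATCH schedules a one-time reboot.

import requests

BASE = "https://api.example.com"                   # placeholder RMM API host
HEADERS = {"Authorization": "Token <api-token>"}   # placeholder auth
URL = f"{BASE}/agents/reboot/"                     # hypothetical route for the Reboot view

# reboot now (Reboot.post)
r = requests.post(URL, json={"pk": 42}, headers=HEADERS)
print(r.json())  # "ok" on success, an error message otherwise

# reboot later (Reboot.patch); datetime must match "%Y-%m-%d %H:%M"
r = requests.patch(
    URL,
    json={"pk": 42, "datetime": "2021-02-01 03:30"},
    headers=HEADERS,
)
print(r.json())  # {"time": ..., "agent": ..., "task_name": ...} on success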
@api_view(["POST"])
@@ -450,7 +461,7 @@ def install_agent(request):
f"GOARCH={goarch}",
go_bin,
"build",
f"-ldflags=\"-X 'main.Inno={inno}'",
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
f"-X 'main.Api={api}'",
f"-X 'main.Client={client_id}'",
f"-X 'main.Site={site_id}'",
@@ -550,7 +561,7 @@ def install_agent(request):
"&&",
"timeout",
"/t",
"20",
"10",
"/nobreak",
">",
"NUL",
@@ -580,8 +591,6 @@ def install_agent(request):
resp = {
"cmd": " ".join(str(i) for i in cmd),
"url": download_url,
"salt64": settings.SALT_64,
"salt32": settings.SALT_32,
}
return Response(resp)
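The only change to the build command above is the extra "-s -w" in -ldflags; a small sketch of what that flag string does (standard Go build behavior, values below are placeholders): -s drops the symbol table, -w drops DWARF debug info, and each -X entry bakes a deployment value into package main at build time.

inno, api = "tacticalagent-v1.4.0.exe", "https://api.example.com"  # placeholders
client_id, site_id = 1, 1

ldflags = (
    "-s -w "                               # smaller installer binary
    f"-X 'main.Inno={inno}' "
    f"-X 'main.Api={api}' "
    f"-X 'main.Client={client_id}' "
    f"-X 'main.Site={site_id}'"
)
build_cmd = ["go", "build", f"-ldflags={ldflags}", "-o", "installer.exe"]
print(" ".join(build_cmd))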
@@ -642,17 +651,12 @@ def recover(request):
return notify_error("Only available in agent version greater than 0.9.5")
if not agent.has_nats:
if mode == "tacagent" or mode == "checkrunner" or mode == "rpc":
if mode == "tacagent" or mode == "rpc":
return notify_error("Requires agent version 1.1.0 or greater")
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
if agent.has_nats:
if (
mode == "tacagent"
or mode == "checkrunner"
or mode == "salt"
or mode == "mesh"
):
if mode == "tacagent" or mode == "mesh":
data = {"func": "recover", "payload": {"mode": mode}}
r = asyncio.run(agent.nats_cmd(data, timeout=10))
if r == "ok":
@@ -711,6 +715,21 @@ def run_script(request):
if output == "wait":
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
return Response(r)
elif output == "email":
if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
return notify_error("Requires agent version 1.1.12 or greater")
emails = (
[] if request.data["emailmode"] == "default" else request.data["emails"]
)
run_script_email_results_task.delay(
agentpk=agent.pk,
scriptpk=script.pk,
nats_timeout=req_timeout,
nats_data=data,
emails=emails,
)
return Response(f"{script.name} will now be run on {agent.hostname}")
else:
asyncio.run(agent.nats_cmd(data, wait=False))
return Response(f"{script.name} will now be run on {agent.hostname}")
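The email branch above only queues a Celery task; the task itself is defined in the agents tasks module, which is not part of this hunk. The sketch below shows what it plausibly does, with every name and default treated as an assumption.

import asyncio

from django.core.mail import send_mail  # assumes Django email settings are configured

from agents.models import Agent
from scripts.models import Script


def run_script_email_results_sketch(agentpk, scriptpk, nats_timeout, nats_data, emails):
    # rough sketch only: run the script over NATS, then mail the raw output
    agent = Agent.objects.get(pk=agentpk)
    script = Script.objects.get(pk=scriptpk)
    r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
    if r == "timeout":
        return
    send_mail(
        subject=f"{script.name} results for {agent.hostname}",
        message=str(r),
        from_email="rmm@example.com",                    # placeholder sender
        recipient_list=emails or ["admin@example.com"],  # placeholder default list
    )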
@@ -794,12 +813,16 @@ def bulk(request):
elif request.data["target"] == "agents":
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
elif request.data["target"] == "all":
q = Agent.objects.all()
q = Agent.objects.only("pk", "monitoring_type")
else:
return notify_error("Something went wrong")
minions = [agent.salt_id for agent in q]
agents = [agent.pk for agent in q]
if request.data["monType"] == "servers":
q = q.filter(monitoring_type="server")
elif request.data["monType"] == "workstations":
q = q.filter(monitoring_type="workstation")
agents: List[int] = [agent.pk for agent in q]
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
@@ -817,14 +840,12 @@ def bulk(request):
return Response(f"{script.name} will now be run on {len(agents)} agents")
elif request.data["mode"] == "install":
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
if r == "timeout":
return notify_error("Salt API not running")
bulk_install_updates_task.delay(agents)
return Response(
f"Pending updates will now be installed on {len(agents)} agents"
)
elif request.data["mode"] == "scan":
bulk_check_for_updates_task.delay(minions=minions)
bulk_check_for_updates_task.delay(agents)
return Response(f"Patch status scan will now run on {len(agents)} agents")
return notify_error("Something went wrong")
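The same queryset pattern in isolation, for reference: .only() defers every column except those listed, so collecting pks no longer pulls full agent rows, and the monType filter narrows the target set before the pk list is built (field names match the Agent model used above).

from agents.models import Agent

q = Agent.objects.only("pk", "monitoring_type")
q = q.filter(monitoring_type="server")   # or "workstation", per request.data["monType"]
agent_pks = [agent.pk for agent in q]    # only pk/monitoring_type are loaded per row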
@@ -871,3 +892,15 @@ def agent_maintenance(request):
return notify_error("Invalid data")
return Response("ok")
class WMI(APIView):
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
if pyver.parse(agent.version) < pyver.parse("1.1.2"):
return notify_error("Requires agent version 1.1.2 or greater")
r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
if r != "ok":
return notify_error("Unable to contact the agent")
return Response("ok")

View File

@@ -7,19 +7,25 @@ import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('checks', '0010_auto_20200922_1344'),
('alerts', '0002_auto_20200815_1618'),
("checks", "0010_auto_20200922_1344"),
("alerts", "0002_auto_20200815_1618"),
]
operations = [
migrations.AddField(
model_name='alert',
name='assigned_check',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'),
model_name="alert",
name="assigned_check",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="alert",
to="checks.check",
),
),
migrations.AlterField(
model_name='alert',
name='alert_time',
model_name="alert",
name="alert_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
]

View File

@@ -37,7 +37,7 @@ class Alert(models.Model):
@classmethod
def create_availability_alert(cls, agent):
pass
@classmethod
def create_check_alert(cls, check):
pass
pass

View File

@@ -16,4 +16,4 @@ class AlertSerializer(ModelSerializer):
class Meta:
model = Alert
fields = "__all__"
fields = "__all__"

View File

@@ -1,5 +0,0 @@
from django.apps import AppConfig
class ApiConfig(AppConfig):
name = "api"

View File

@@ -1,11 +0,0 @@
from django.urls import path
from . import views
from apiv3 import views as v3_views
urlpatterns = [
path("triggerpatchscan/", views.trigger_patch_scan),
path("<int:pk>/checkrunner/", views.CheckRunner.as_view()),
path("<int:pk>/taskrunner/", views.TaskRunner.as_view()),
path("<int:pk>/saltinfo/", views.SaltInfo.as_view()),
path("<int:pk>/meshinfo/", v3_views.MeshInfo.as_view()),
]

View File

@@ -1,149 +0,0 @@
from loguru import logger
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import (
api_view,
authentication_classes,
permission_classes,
)
from agents.models import Agent
from checks.models import Check
from autotasks.models import AutomatedTask
from winupdate.tasks import check_for_updates_task
from autotasks.serializers import TaskRunnerGetSerializer, TaskRunnerPatchSerializer
from checks.serializers import CheckRunnerGetSerializer, CheckResultsSerializer
logger.configure(**settings.LOG_CONFIG)
@api_view(["PATCH"])
@authentication_classes((TokenAuthentication,))
@permission_classes((IsAuthenticated,))
def trigger_patch_scan(request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
reboot_policy = agent.get_patch_policy().reboot_after_install
reboot = False
if reboot_policy == "always":
reboot = True
if request.data["reboot"]:
if reboot_policy == "required":
reboot = True
elif reboot_policy == "never":
agent.needs_reboot = True
agent.save(update_fields=["needs_reboot"])
if reboot:
r = agent.salt_api_cmd(
timeout=15,
func="system.reboot",
arg=7,
kwargs={"in_seconds": True},
)
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
else:
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
else:
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
return Response("ok")
class CheckRunner(APIView):
"""
For windows agent
"""
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
checks = Check.objects.filter(agent__pk=pk, overriden_by_policy=False)
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializer(checks, many=True).data,
}
return Response(ret)
def patch(self, request, pk):
check = get_object_or_404(Check, pk=pk)
if check.check_type != "cpuload" and check.check_type != "memory":
serializer = CheckResultsSerializer(
instance=check, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save(last_run=djangotime.now())
else:
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
check.handle_check(request.data)
return Response("ok")
class TaskRunner(APIView):
"""
For the windows python agent
"""
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk):
task = get_object_or_404(AutomatedTask, pk=pk)
return Response(TaskRunnerGetSerializer(task).data)
def patch(self, request, pk):
task = get_object_or_404(AutomatedTask, pk=pk)
serializer = TaskRunnerPatchSerializer(
instance=task, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save(last_run=djangotime.now())
return Response("ok")
class SaltInfo(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
ret = {
"latestVer": settings.LATEST_SALT_VER,
"currentVer": agent.salt_ver,
"salt_id": agent.salt_id,
}
return Response(ret)
def patch(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
agent.salt_ver = request.data["ver"]
agent.save(update_fields=["salt_ver"])
return Response("ok")

View File

@@ -1,5 +0,0 @@
from django.apps import AppConfig
class Apiv2Config(AppConfig):
name = 'apiv2'

View File

@@ -1,38 +0,0 @@
from tacticalrmm.test import TacticalTestCase
from unittest.mock import patch
from model_bakery import baker
from itertools import cycle
class TestAPIv2(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
@patch("agents.models.Agent.salt_api_cmd")
def test_sync_modules(self, mock_ret):
# setup data
agent = baker.make_recipe("agents.agent")
url = "/api/v2/saltminion/"
payload = {"agent_id": agent.agent_id}
mock_ret.return_value = "error"
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 400)
mock_ret.return_value = []
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "Modules are already in sync")
mock_ret.return_value = ["modules.win_agent"]
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "Successfully synced salt modules")
mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
r = self.client.patch(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "Successfully synced salt modules")
self.check_not_authenticated("patch", url)

View File

@@ -1,14 +0,0 @@
from django.urls import path
from . import views
from apiv3 import views as v3_views
urlpatterns = [
path("newagent/", v3_views.NewAgent.as_view()),
path("meshexe/", v3_views.MeshExe.as_view()),
path("saltminion/", v3_views.SaltMinion.as_view()),
path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
path("sysinfo/", v3_views.SysInfo.as_view()),
path("hello/", v3_views.Hello.as_view()),
path("checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
]

View File

@@ -1,41 +0,0 @@
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from agents.models import Agent
from checks.models import Check
from checks.serializers import CheckRunnerGetSerializerV2
class CheckRunner(APIView):
"""
For the windows python agent
"""
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
agent.last_seen = djangotime.now()
agent.save(update_fields=["last_seen"])
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializerV2(checks, many=True).data,
}
return Response(ret)
def patch(self, request):
check = get_object_or_404(Check, pk=request.data["id"])
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
status = check.handle_checkv2(request.data)
return Response(status)

View File

@@ -26,34 +26,13 @@ class TestAPIv3(TacticalTestCase):
self.check_not_authenticated("get", url)
def test_get_salt_minion(self):
url = f"/api/v3/{self.agent.agent_id}/saltminion/"
url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertIn("latestVer", r.json().keys())
self.assertIn("currentVer", r.json().keys())
self.assertIn("salt_id", r.json().keys())
self.assertIn("downloadURL", r.json().keys())
r2 = self.client.get(url2)
self.assertEqual(r2.status_code, 200)
self.check_not_authenticated("get", url)
self.check_not_authenticated("get", url2)
def test_get_mesh_info(self):
url = f"/api/v3/{self.agent.pk}/meshinfo/"
url2 = f"/api/v1/{self.agent.pk}/meshinfo/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
r = self.client.get(url2)
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("get", url)
self.check_not_authenticated("get", url2)
def test_get_winupdater(self):
url = f"/api/v3/{self.agent.agent_id}/winupdater/"
@@ -65,7 +44,7 @@ class TestAPIv3(TacticalTestCase):
def test_sysinfo(self):
# TODO replace this with golang wmi sample data
url = f"/api/v3/sysinfo/"
url = "/api/v3/sysinfo/"
with open(
os.path.join(
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
@@ -81,7 +60,7 @@ class TestAPIv3(TacticalTestCase):
self.check_not_authenticated("patch", url)
def test_hello_patch(self):
url = f"/api/v3/hello/"
url = "/api/v3/hello/"
payload = {
"agent_id": self.agent.agent_id,
"logged_in_username": "None",
@@ -96,3 +75,12 @@ class TestAPIv3(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("patch", url)
def test_checkrunner_interval(self):
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
r = self.client.get(url, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(
r.json(),
{"agent": self.agent.pk, "check_interval": self.agent.check_interval},
)

View File

@@ -2,16 +2,18 @@ from django.urls import path
from . import views
urlpatterns = [
path("checkin/", views.CheckIn.as_view()),
path("hello/", views.Hello.as_view()),
path("checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
path("saltminion/", views.SaltMinion.as_view()),
path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
path("meshexe/", views.MeshExe.as_view()),
path("sysinfo/", views.SysInfo.as_view()),
path("newagent/", views.NewAgent.as_view()),
path("winupdater/", views.WinUpdater.as_view()),
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
path("software/", views.Software.as_view()),
path("installer/", views.Installer.as_view()),
]

View File

@@ -2,12 +2,12 @@ import asyncio
import os
import requests
from loguru import logger
from packaging import version as pyver
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from django.http import HttpResponse
from rest_framework import serializers
from rest_framework.response import Response
from rest_framework.views import APIView
@@ -20,7 +20,8 @@ from checks.models import Check
from autotasks.models import AutomatedTask
from accounts.models import User
from winupdate.models import WinUpdatePolicy
from checks.serializers import CheckRunnerGetSerializerV3
from software.models import InstalledSoftware
from checks.serializers import CheckRunnerGetSerializer
from agents.serializers import WinAgentSerializer
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
from winupdate.serializers import ApprovedUpdateSerializer
@@ -28,18 +29,110 @@ from winupdate.serializers import ApprovedUpdateSerializer
from agents.tasks import (
agent_recovery_email_task,
agent_recovery_sms_task,
get_wmi_detail_task,
sync_salt_modules_task,
)
from winupdate.tasks import check_for_updates_task
from software.tasks import get_installed_software, install_chocolatey
from checks.utils import bytes2human
from tacticalrmm.utils import notify_error, reload_nats
from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList
logger.configure(**settings.LOG_CONFIG)
class CheckIn(APIView):
"""
The agent's checkin endpoint
patch: called every 45 to 110 seconds, handles agent updates and recovery
put: called every 5 to 10 minutes, handles basic system info
post: called once on windows service startup
"""
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def patch(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent.version = request.data["version"]
agent.last_seen = djangotime.now()
agent.save(update_fields=["version", "last_seen"])
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
last_outage = agent.agentoutages.last()
last_outage.recovery_time = djangotime.now()
last_outage.save(update_fields=["recovery_time"])
if agent.overdue_email_alert:
agent_recovery_email_task.delay(pk=last_outage.pk)
if agent.overdue_text_alert:
agent_recovery_sms_task.delay(pk=last_outage.pk)
recovery = agent.recoveryactions.filter(last_run=None).last()
if recovery is not None:
recovery.last_run = djangotime.now()
recovery.save(update_fields=["last_run"])
return Response(recovery.send())
# handle agent update
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
update = agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).last()
update.status = "completed"
update.save(update_fields=["status"])
return Response(update.details)
# get any pending actions
if agent.pendingactions.filter(status="pending").exists():
agent.handle_pending_actions()
return Response("ok")
def put(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
if "disks" in request.data.keys():
disks = request.data["disks"]
new = []
# python agent
if isinstance(disks, dict):
for k, v in disks.items():
new.append(v)
else:
# golang agent
for disk in disks:
tmp = {}
for k, v in disk.items():
tmp["device"] = disk["device"]
tmp["fstype"] = disk["fstype"]
tmp["total"] = bytes2human(disk["total"])
tmp["used"] = bytes2human(disk["used"])
tmp["free"] = bytes2human(disk["free"])
tmp["percent"] = int(disk["percent"])
new.append(tmp)
serializer.save(disks=new)
return Response("ok")
if "logged_in_username" in request.data.keys():
if request.data["logged_in_username"] != "None":
serializer.save(last_logged_in_user=request.data["logged_in_username"])
return Response("ok")
serializer.save()
return Response("ok")
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save(last_seen=djangotime.now())
return Response("ok")
class Hello(APIView):
#### DEPRECATED, for agents <= 1.1.9 ####
"""
The agent's checkin endpoint
patch: called every 30 to 120 seconds
@@ -121,17 +214,6 @@ class Hello(APIView):
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save(last_seen=djangotime.now())
sync_salt_modules_task.delay(agent.pk)
get_installed_software.delay(agent.pk)
get_wmi_detail_task.delay(agent.pk)
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
)
if not agent.choco_installed:
install_chocolatey.delay(agent.pk, wait=True)
return Response("ok")
@@ -150,31 +232,28 @@ class CheckRunner(APIView):
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializerV3(checks, many=True).data,
"checks": CheckRunnerGetSerializer(checks, many=True).data,
}
return Response(ret)
def patch(self, request):
from logs.models import AuditLog
check = get_object_or_404(Check, pk=request.data["id"])
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
status = check.handle_checkv2(request.data)
# create audit entry
AuditLog.objects.create(
username=check.agent.hostname,
agent=check.agent.hostname,
object_type="agent",
action="check_run",
message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}",
after_value=Check.serialize(check),
)
return Response(status)
class CheckRunnerInterval(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
return Response({"agent": agent.pk, "check_interval": agent.check_interval})
class TaskRunner(APIView):
"""
For the windows golang agent
@@ -213,77 +292,6 @@ class TaskRunner(APIView):
return Response("ok")
class SaltMinion(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
ret = {
"latestVer": settings.LATEST_SALT_VER,
"currentVer": agent.salt_ver,
"salt_id": agent.salt_id,
"downloadURL": agent.winsalt_dl,
}
return Response(ret)
def post(self, request):
# accept the salt key
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
if agent.salt_id != request.data["saltid"]:
return notify_error("Salt keys do not match")
try:
resp = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "wheel",
"fun": "key.accept",
"match": request.data["saltid"],
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=30,
)
except Exception:
return notify_error("No communication between agent and salt-api")
try:
data = resp.json()["return"][0]["data"]
minion = data["return"]["minions"][0]
except Exception:
return notify_error("Key error")
if data["success"] and minion == request.data["saltid"]:
return Response("Salt key was accepted")
else:
return notify_error("Not accepted")
def patch(self, request):
# sync modules
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")
if r == "timeout" or r == "error":
return notify_error("Failed to sync salt modules")
if isinstance(r, list) and any("modules" in i for i in r):
return Response("Successfully synced salt modules")
elif isinstance(r, list) and not r:
return Response("Modules are already in sync")
else:
return notify_error(f"Failed to sync salt modules: {str(r)}")
def put(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
agent.salt_ver = request.data["ver"]
agent.save(update_fields=["salt_ver"])
return Response("ok")
class WinUpdater(APIView):
authentication_classes = [TokenAuthentication]
@@ -324,6 +332,7 @@ class WinUpdater(APIView):
update.installed = True
update.save(update_fields=["result", "downloaded", "installed"])
agent.delete_superseded_updates()
return Response("ok")
# agent calls this after it's finished installing all patches
@@ -345,19 +354,11 @@ class WinUpdater(APIView):
if reboot:
if agent.has_nats:
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
else:
                agent.salt_api_async(
                    func="system.reboot",
                    arg=7,
                    kwargs={"in_seconds": True},
                )
                logger.info(
                    f"{agent.hostname} is rebooting after updates were installed."
                )

logger.info(f"{agent.hostname} is rebooting after updates were installed.")
else:
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
agent.delete_superseded_updates()
return Response("ok")
@@ -386,7 +387,15 @@ class MeshInfo(APIView):
def patch(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
agent.mesh_node_id = request.data["nodeidhex"]
if "nodeidhex" in request.data:
# agent <= 1.1.0
nodeid = request.data["nodeidhex"]
else:
# agent >= 1.1.1
nodeid = request.data["nodeid"]
agent.mesh_node_id = nodeid
agent.save(update_fields=["mesh_node_id"])
return Response("ok")
@@ -476,3 +485,42 @@ class NewAgent(APIView):
"token": token.key,
}
)
class Software(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def post(self, request):
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
raw: SoftwareList = request.data["software"]
if not isinstance(raw, list):
return notify_error("err")
sw = filter_software(raw)
if not InstalledSoftware.objects.filter(agent=agent).exists():
InstalledSoftware(agent=agent, software=sw).save()
else:
s = agent.installedsoftware_set.first()
s.software = sw
s.save(update_fields=["software"])
return Response("ok")
class Installer(APIView):
def get(self, request):
# used to check if token is valid. will return 401 if not
return Response("ok")
def post(self, request):
if "version" not in request.data:
return notify_error("Invalid data")
ver = request.data["version"]
if pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER):
return notify_error(
f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM"
)
return Response("ok")

View File

@@ -6,11 +6,11 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('automation', '0005_auto_20200922_1344'),
("automation", "0005_auto_20200922_1344"),
]
operations = [
migrations.DeleteModel(
name='PolicyExclusions',
name="PolicyExclusions",
),
]

View File

@@ -1,6 +1,5 @@
from django.db import models
from agents.models import Agent
from clients.models import Site, Client
from core.models import CoreSettings
from logs.models import BaseAuditModel
@@ -58,6 +57,11 @@ class Policy(BaseAuditModel):
@staticmethod
def cascade_policy_tasks(agent):
from autotasks.tasks import delete_win_task_schedule
from autotasks.models import AutomatedTask
from logs.models import PendingAction
# List of all tasks to be applied
tasks = list()
added_task_pks = list()
@@ -80,7 +84,7 @@ class Policy(BaseAuditModel):
default_policy = CoreSettings.objects.first().server_policy
client_policy = client.server_policy
site_policy = site.server_policy
else:
elif agent.monitoring_type == "workstation":
default_policy = CoreSettings.objects.first().workstation_policy
client_policy = client.workstation_policy
site_policy = site.workstation_policy
@@ -107,6 +111,33 @@ class Policy(BaseAuditModel):
tasks.append(task)
added_task_pks.append(task.pk)
# remove policy tasks from agent not included in policy
for task in agent.autotasks.filter(
parent_task__in=[
taskpk
for taskpk in agent_tasks_parent_pks
if taskpk not in added_task_pks
]
):
delete_win_task_schedule.delay(task.pk)
# handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
for action in agent.pendingactions.exclude(status="completed"):
task = AutomatedTask.objects.get(pk=action.details["task_id"])
if (
task.parent_task in agent_tasks_parent_pks
and task.parent_task in added_task_pks
):
agent.remove_matching_pending_task_actions(task.id)
PendingAction(
agent=agent,
action_type="taskaction",
details={"action": "taskcreate", "task_id": task.id},
).save()
task.sync_status = "notsynced"
task.save(update_fields=["sync_status"])
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
@staticmethod
@@ -132,7 +163,7 @@ class Policy(BaseAuditModel):
default_policy = CoreSettings.objects.first().server_policy
client_policy = client.server_policy
site_policy = site.server_policy
else:
elif agent.monitoring_type == "workstation":
default_policy = CoreSettings.objects.first().workstation_policy
client_policy = client.workstation_policy
site_policy = site.workstation_policy
@@ -280,6 +311,15 @@ class Policy(BaseAuditModel):
+ eventlog_checks
)
# remove policy checks from agent that fell out of policy scope
agent.agentchecks.filter(
parent_check__in=[
checkpk
for checkpk in agent_checks_parent_pks
if checkpk not in [check.pk for check in final_list]
]
).delete()
return [
check for check in final_list if check.pk not in agent_checks_parent_pks
]

View File

@@ -6,46 +6,46 @@ from tacticalrmm.celery import app
@app.task
def generate_agent_checks_from_policies_task(
###
# copies the policy checks to all affected agents
#
# clear: clears all policy checks first
# create_tasks: also create tasks after checks are generated
###
policypk,
clear=False,
create_tasks=False,
):
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
policy = Policy.objects.get(pk=policypk)
for agent in policy.related_agents():
agent.generate_checks_from_policies(clear=clear)
if policy.is_default_server_policy and policy.is_default_workstation_policy:
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
elif policy.is_default_server_policy:
agents = Agent.objects.filter(monitoring_type="server").only(
"pk", "monitoring_type"
)
elif policy.is_default_workstation_policy:
agents = Agent.objects.filter(monitoring_type="workstation").only(
"pk", "monitoring_type"
)
else:
agents = policy.related_agents()
for agent in agents:
agent.generate_checks_from_policies()
if create_tasks:
agent.generate_tasks_from_policies(
clear=clear,
)
agent.generate_tasks_from_policies()
@app.task
def generate_agent_checks_by_location_task(
location, mon_type, clear=False, create_tasks=False
):
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
agent.generate_checks_from_policies(clear=clear)
agent.generate_checks_from_policies()
if create_tasks:
agent.generate_tasks_from_policies(clear=clear)
agent.generate_tasks_from_policies()
@app.task
def generate_all_agent_checks_task(mon_type, clear=False, create_tasks=False):
def generate_all_agent_checks_task(mon_type, create_tasks=False):
for agent in Agent.objects.filter(monitoring_type=mon_type):
agent.generate_checks_from_policies(clear=clear)
agent.generate_checks_from_policies()
if create_tasks:
agent.generate_tasks_from_policies(clear=clear)
agent.generate_tasks_from_policies()
@app.task
@@ -83,18 +83,32 @@ def update_policy_check_fields_task(checkpk):
@app.task
def generate_agent_tasks_from_policies_task(policypk, clear=False):
def generate_agent_tasks_from_policies_task(policypk):
policy = Policy.objects.get(pk=policypk)
for agent in policy.related_agents():
agent.generate_tasks_from_policies(clear=clear)
if policy.is_default_server_policy and policy.is_default_workstation_policy:
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
elif policy.is_default_server_policy:
agents = Agent.objects.filter(monitoring_type="server").only(
"pk", "monitoring_type"
)
elif policy.is_default_workstation_policy:
agents = Agent.objects.filter(monitoring_type="workstation").only(
"pk", "monitoring_type"
)
else:
agents = policy.related_agents()
for agent in agents:
agent.generate_tasks_from_policies()
@app.task
def generate_agent_tasks_by_location_task(location, mon_type, clear=False):
def generate_agent_tasks_by_location_task(location, mon_type):
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
agent.generate_tasks_from_policies(clear=clear)
agent.generate_tasks_from_policies()
@app.task

View File

@@ -121,9 +121,7 @@ class TestPolicyViews(TacticalTestCase):
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
mock_checks_task.assert_called_with(
policypk=policy.pk, clear=True, create_tasks=True
)
mock_checks_task.assert_called_with(policypk=policy.pk, create_tasks=True)
self.check_not_authenticated("put", url)
@@ -140,8 +138,8 @@ class TestPolicyViews(TacticalTestCase):
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
mock_checks_task.assert_called_with(policypk=policy.pk, clear=True)
mock_tasks_task.assert_called_with(policypk=policy.pk, clear=True)
mock_checks_task.assert_called_with(policypk=policy.pk)
mock_tasks_task.assert_called_with(policypk=policy.pk)
self.check_not_authenticated("delete", url)
@@ -298,7 +296,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -311,7 +308,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -324,7 +320,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site_id": site.id},
mon_type="server",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -337,7 +332,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -347,7 +341,7 @@ class TestPolicyViews(TacticalTestCase):
self.assertEqual(resp.status_code, 200)
# called because the relation changed
mock_checks_task.assert_called_with(clear=True)
mock_checks_task.assert_called()
mock_checks_task.reset_mock()
# Adding the same relations shouldn't trigger mocks
@@ -396,7 +390,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -409,7 +402,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -422,7 +414,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site_id": site.id},
mon_type="server",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -435,7 +426,6 @@ class TestPolicyViews(TacticalTestCase):
mock_checks_location_task.assert_called_with(
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
mock_checks_location_task.reset_mock()
@@ -444,7 +434,7 @@ class TestPolicyViews(TacticalTestCase):
resp = self.client.post(url, agent_payload, format="json")
self.assertEqual(resp.status_code, 200)
# called because the relation changed
mock_checks_task.assert_called_with(clear=True)
mock_checks_task.assert_called()
mock_checks_task.reset_mock()
# adding the same relations shouldn't trigger mocks
@@ -753,7 +743,7 @@ class TestPolicyTasks(TacticalTestCase):
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
# test policy assigned to agent
generate_agent_checks_from_policies_task(policy.id, clear=True)
generate_agent_checks_from_policies_task(policy.id)
# make sure all checks were created. should be 7
agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
@@ -832,7 +822,6 @@ class TestPolicyTasks(TacticalTestCase):
generate_agent_checks_by_location_task(
{"site_id": sites[0].id},
"server",
clear=True,
create_tasks=True,
)
@@ -846,7 +835,6 @@ class TestPolicyTasks(TacticalTestCase):
generate_agent_checks_by_location_task(
{"site__client_id": clients[0].id},
"workstation",
clear=True,
create_tasks=True,
)
# workstation_agent should now have policy checks and the other agents should not
@@ -875,7 +863,7 @@ class TestPolicyTasks(TacticalTestCase):
core.workstation_policy = policy
core.save()
generate_all_agent_checks_task("server", clear=True, create_tasks=True)
generate_all_agent_checks_task("server", create_tasks=True)
# all servers should have 7 checks
for agent in server_agents:
@@ -884,7 +872,7 @@ class TestPolicyTasks(TacticalTestCase):
for agent in workstation_agents:
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
generate_all_agent_checks_task("workstation", clear=True, create_tasks=True)
generate_all_agent_checks_task("workstation", create_tasks=True)
# all agents should have 7 checks now
for agent in server_agents:
@@ -961,7 +949,7 @@ class TestPolicyTasks(TacticalTestCase):
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
generate_agent_tasks_from_policies_task(policy.id, clear=True)
generate_agent_tasks_from_policies_task(policy.id)
agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()
@@ -1000,9 +988,7 @@ class TestPolicyTasks(TacticalTestCase):
agent1 = baker.make_recipe("agents.agent", site=sites[1])
agent2 = baker.make_recipe("agents.agent", site=sites[3])
generate_agent_tasks_by_location_task(
{"site_id": sites[0].id}, "server", clear=True
)
generate_agent_tasks_by_location_task({"site_id": sites[0].id}, "server")
# all servers in site1 and site2 should have 3 tasks
self.assertEqual(
@@ -1013,7 +999,7 @@ class TestPolicyTasks(TacticalTestCase):
self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)
generate_agent_tasks_by_location_task(
{"site__client_id": clients[0].id}, "workstation", clear=True
{"site__client_id": clients[0].id}, "workstation"
)
# all workstations in Default1 should have 3 tasks
@@ -1051,10 +1037,13 @@ class TestPolicyTasks(TacticalTestCase):
for task in tasks:
run_win_task.assert_any_call(task.id)
def test_update_policy_tasks(self):
@patch("agents.models.Agent.nats_cmd")
def test_update_policy_tasks(self, nats_cmd):
from .tasks import update_policy_task_fields_task
from autotasks.models import AutomatedTask
nats_cmd.return_value = "ok"
# setup data
policy = baker.make("automation.Policy", active=True)
tasks = baker.make(

View File

@@ -83,7 +83,6 @@ class GetUpdateDeletePolicy(APIView):
if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
generate_agent_checks_from_policies_task.delay(
policypk=policy.pk,
clear=(not saved_policy.active or not saved_policy.enforced),
create_tasks=(saved_policy.active != old_active),
)
@@ -93,8 +92,8 @@ class GetUpdateDeletePolicy(APIView):
policy = get_object_or_404(Policy, pk=pk)
# delete all managed policy checks off of agents
generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True)
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True)
generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk)
policy.delete()
return Response("ok")
@@ -218,7 +217,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
@@ -236,7 +234,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
@@ -258,7 +255,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
)
@@ -276,7 +272,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.id},
mon_type="server",
clear=True,
create_tasks=True,
)
@@ -296,7 +291,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
@@ -311,7 +305,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
)
@@ -329,7 +322,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
)
@@ -343,7 +335,6 @@ class GetRelated(APIView):
generate_agent_checks_by_location_task.delay(
location={"site_id": site.pk},
mon_type="server",
clear=True,
create_tasks=True,
)
@@ -358,14 +349,14 @@ class GetRelated(APIView):
if not agent.policy or agent.policy and agent.policy.pk != policy.pk:
agent.policy = policy
agent.save()
agent.generate_checks_from_policies(clear=True)
agent.generate_tasks_from_policies(clear=True)
agent.generate_checks_from_policies()
agent.generate_tasks_from_policies()
else:
if agent.policy:
agent.policy = None
agent.save()
agent.generate_checks_from_policies(clear=True)
agent.generate_tasks_from_policies(clear=True)
agent.generate_checks_from_policies()
agent.generate_tasks_from_policies()
return Response("ok")
@@ -422,11 +413,15 @@ class UpdatePatchPolicy(APIView):
agents = None
if "client" in request.data:
agents = Agent.objects.filter(site__client_id=request.data["client"])
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
site__client_id=request.data["client"]
)
elif "site" in request.data:
agents = Agent.objects.filter(site_id=request.data["site"])
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
site_id=request.data["site"]
)
else:
agents = Agent.objects.all()
agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")
for agent in agents:
winupdatepolicy = agent.winupdatepolicy.get()

View File

@@ -7,7 +7,7 @@ class Command(BaseCommand):
help = "Checks for orphaned tasks on all agents and removes them"
def handle(self, *args, **kwargs):
agents = Agent.objects.all()
agents = Agent.objects.only("pk", "last_seen", "overdue_time")
online = [i for i in agents if i.status == "online"]
for agent in online:
remove_orphaned_win_tasks.delay(agent.pk)

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-29 09:12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("autotasks", "0008_auto_20201030_1515"),
]
operations = [
migrations.AddField(
model_name="automatedtask",
name="run_time_bit_weekdays",
field=models.IntegerField(blank=True, null=True),
),
]

View File

@@ -0,0 +1,33 @@
from django.db import migrations
from tacticalrmm.utils import get_bit_days
DAYS_OF_WEEK = {
0: "Monday",
1: "Tuesday",
2: "Wednesday",
3: "Thursday",
4: "Friday",
5: "Saturday",
6: "Sunday",
}
def migrate_days(apps, schema_editor):
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
for task in AutomatedTask.objects.exclude(run_time_days__isnull=True).exclude(
run_time_days=[]
):
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
task.run_time_bit_weekdays = get_bit_days(run_days)
task.save(update_fields=["run_time_bit_weekdays"])
class Migration(migrations.Migration):
dependencies = [
("autotasks", "0009_automatedtask_run_time_bit_weekdays"),
]
operations = [
migrations.RunPython(migrate_days),
]
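get_bit_days and bitdays_to_string live in tacticalrmm.utils and are not part of this hunk; the sketch below shows the general idea of the bitmask encoding used by run_time_bit_weekdays, assuming one bit per weekday with Monday as the lowest bit (the real bit order may differ).

WEEKDAYS = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]


def get_bit_days_sketch(days: list) -> int:
    # fold a list of weekday names into a single integer bitmask
    mask = 0
    for day in days:
        mask |= 1 << WEEKDAYS.index(day)
    return mask


def bitdays_to_string_sketch(mask: int) -> str:
    # turn the bitmask back into a human-readable schedule string
    names = [day for i, day in enumerate(WEEKDAYS) if mask & (1 << i)]
    return "Every day" if len(names) == 7 else ", ".join(names)


assert get_bit_days_sketch(["Monday", "Friday"]) == 0b10001
assert bitdays_to_string_sketch(0b10001) == "Monday, Friday"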

View File

@@ -6,8 +6,8 @@ import datetime as dt
from django.db import models
from django.contrib.postgres.fields import ArrayField
from django.db.models.fields import DateTimeField
from automation.models import Policy
from logs.models import BaseAuditModel
from tacticalrmm.utils import bitdays_to_string
RUN_TIME_DAY_CHOICES = [
(0, "Monday"),
@@ -42,7 +42,7 @@ class AutomatedTask(BaseAuditModel):
blank=True,
)
policy = models.ForeignKey(
Policy,
"automation.Policy",
related_name="autotasks",
null=True,
blank=True,
@@ -69,6 +69,8 @@ class AutomatedTask(BaseAuditModel):
on_delete=models.SET_NULL,
)
name = models.CharField(max_length=255)
run_time_bit_weekdays = models.IntegerField(null=True, blank=True)
# run_time_days is deprecated, use bit weekdays
run_time_days = ArrayField(
models.IntegerField(choices=RUN_TIME_DAY_CHOICES, null=True, blank=True),
null=True,
@@ -107,21 +109,12 @@ class AutomatedTask(BaseAuditModel):
elif self.task_type == "runonce":
return f'Run once on {self.run_time_date.strftime("%m/%d/%Y %I:%M%p")}'
elif self.task_type == "scheduled":
ret = []
for i in self.run_time_days:
for j in RUN_TIME_DAY_CHOICES:
if i in j:
ret.append(j[1][0:3])
run_time_nice = dt.datetime.strptime(
self.run_time_minute, "%H:%M"
).strftime("%I:%M %p")
if len(ret) == 7:
return f"Every day at {run_time_nice}"
else:
days = ",".join(ret)
return f"{days} at {run_time_nice}"
days = bitdays_to_string(self.run_time_bit_weekdays)
return f"{days} at {run_time_nice}"
@property
def last_run_as_timezone(self):
@@ -169,6 +162,7 @@ class AutomatedTask(BaseAuditModel):
name=self.name,
run_time_days=self.run_time_days,
run_time_minute=self.run_time_minute,
run_time_bit_weekdays=self.run_time_bit_weekdays,
run_time_date=self.run_time_date,
task_type=self.task_type,
win_task_name=self.win_task_name,

View File

@@ -1,52 +1,37 @@
import asyncio
import datetime as dt
from loguru import logger
from tacticalrmm.celery import app
from django.conf import settings
import pytz
from django.utils import timezone as djangotime
from packaging import version as pyver
from .models import AutomatedTask
from logs.models import PendingAction
logger.configure(**settings.LOG_CONFIG)
DAYS_OF_WEEK = {
0: "Monday",
1: "Tuesday",
2: "Wednesday",
3: "Thursday",
4: "Friday",
5: "Saturday",
6: "Sunday",
}
@app.task
def create_win_task_schedule(pk, pending_action=False):
task = AutomatedTask.objects.get(pk=pk)
if task.task_type == "scheduled":
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
r = task.agent.salt_api_cmd(
timeout=20,
func="task.create_task",
arg=[
f"name={task.win_task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {task.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Weekly",
f'start_time="{task.run_time_minute}"',
"ac_only=False",
"stop_if_on_batteries=False",
],
kwargs={"days_of_week": run_days},
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "weekly",
"weekdays": task.run_time_bit_weekdays,
"pk": task.pk,
"name": task.win_task_name,
"hour": dt.datetime.strptime(task.run_time_minute, "%H:%M").hour,
"min": dt.datetime.strptime(task.run_time_minute, "%H:%M").minute,
},
}
elif task.task_type == "runonce":
# check if scheduled time is in the past
agent_tz = pytz.timezone(task.agent.timezone)
task_time_utc = task.run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
@@ -57,47 +42,48 @@ def create_win_task_schedule(pk, pending_action=False):
) + djangotime.timedelta(minutes=5)
task.save()
r = task.agent.salt_api_cmd(
timeout=20,
func="task.create_task",
arg=[
f"name={task.win_task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {task.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
f'start_date="{task.run_time_date.strftime("%Y-%m-%d")}"',
f'start_time="{task.run_time_date.strftime("%H:%M")}"',
"ac_only=False",
"stop_if_on_batteries=False",
"start_when_available=True",
],
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "once",
"pk": task.pk,
"name": task.win_task_name,
"year": int(dt.datetime.strftime(task.run_time_date, "%Y")),
"month": dt.datetime.strftime(task.run_time_date, "%B"),
"day": int(dt.datetime.strftime(task.run_time_date, "%d")),
"hour": int(dt.datetime.strftime(task.run_time_date, "%H")),
"min": int(dt.datetime.strftime(task.run_time_date, "%M")),
},
}
if task.remove_if_not_scheduled and pyver.parse(
task.agent.version
) >= pyver.parse("1.1.2"):
nats_data["schedtaskpayload"]["deleteafter"] = True
elif task.task_type == "checkfailure" or task.task_type == "manual":
r = task.agent.salt_api_cmd(
timeout=20,
func="task.create_task",
arg=[
f"name={task.win_task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {task.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
'start_date="1975-01-01"',
'start_time="01:00"',
"ac_only=False",
"stop_if_on_batteries=False",
],
)
nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": task.pk,
"name": task.win_task_name,
},
}
else:
return "error"
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
if r != "ok":
# don't create pending action if this task was initiated by a pending action
if not pending_action:
# complete any other pending actions on agent with same task_id
task.agent.remove_matching_pending_task_actions(task.id)
PendingAction(
agent=task.agent,
action_type="taskaction",
@@ -129,13 +115,16 @@ def create_win_task_schedule(pk, pending_action=False):
def enable_or_disable_win_task(pk, action, pending_action=False):
task = AutomatedTask.objects.get(pk=pk)
r = task.agent.salt_api_cmd(
timeout=20,
func="task.edit_task",
arg=[f"name={task.win_task_name}", f"enabled={action}"],
)
nats_data = {
"func": "enableschedtask",
"schedtaskpayload": {
"name": task.win_task_name,
"enabled": action,
},
}
r = asyncio.run(task.agent.nats_cmd(nats_data))
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
if r != "ok":
# don't create pending action if this task was initiated by a pending action
if not pending_action:
PendingAction(
@@ -150,9 +139,6 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
task.sync_status = "notsynced"
task.save(update_fields=["sync_status"])
logger.error(
f"Unable to update the scheduled task {task.win_task_name} on {task.agent.hostname}. It will be updated when the agent checks in."
)
return
# clear pending action since it was successful
@@ -163,7 +149,7 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
task.sync_status = "synced"
task.save(update_fields=["sync_status"])
logger.info(f"{task.agent.hostname} task {task.name} was edited.")
return "ok"
@@ -171,15 +157,19 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
def delete_win_task_schedule(pk, pending_action=False):
task = AutomatedTask.objects.get(pk=pk)
r = task.agent.salt_api_cmd(
timeout=20,
func="task.delete_task",
arg=[f"name={task.win_task_name}"],
)
nats_data = {
"func": "delschedtask",
"schedtaskpayload": {"name": task.win_task_name},
}
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
if r != "ok" and "The system cannot find the file specified" not in r:
# don't create pending action if this task was initiated by a pending action
if not pending_action:
# complete any other pending actions on agent with same task_id
task.agent.remove_matching_pending_task_actions(task.id)
PendingAction(
agent=task.agent,
action_type="taskaction",
@@ -188,10 +178,7 @@ def delete_win_task_schedule(pk, pending_action=False):
task.sync_status = "pendingdeletion"
task.save(update_fields=["sync_status"])
logger.error(
f"Unable to delete scheduled task {task.win_task_name} on {task.agent.hostname}. It was marked pending deletion and will be removed when the agent checks in."
)
return
return "timeout"
# complete pending action since it was successful
if pending_action:
@@ -199,16 +186,17 @@ def delete_win_task_schedule(pk, pending_action=False):
pendingaction.status = "completed"
pendingaction.save(update_fields=["status"])
# complete any other pending actions on agent with same task_id
task.agent.remove_matching_pending_task_actions(task.id)
task.delete()
logger.info(f"{task.agent.hostname} task {task.name} was deleted.")
return "ok"
@app.task
def run_win_task(pk):
# TODO deprecated, remove this function once salt gone
task = AutomatedTask.objects.get(pk=pk)
r = task.agent.salt_api_async(func="task.run", arg=[f"name={task.win_task_name}"])
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
return "ok"
@@ -220,18 +208,9 @@ def remove_orphaned_win_tasks(agentpk):
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")
r = agent.salt_api_cmd(
timeout=15,
func="task.list_tasks",
)
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
if r == "timeout" or r == "error":
logger.error(
f"Unable to clean up scheduled tasks on {agent.hostname}. Agent might be offline"
)
return "errtimeout"
if not isinstance(r, list):
if not isinstance(r, list) and not r: # empty list
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
return "notlist"
@@ -240,7 +219,8 @@ def remove_orphaned_win_tasks(agentpk):
exclude_tasks = (
"TacticalRMM_fixmesh",
"TacticalRMM_SchedReboot",
"TacticalRMM_saltwatchdog", # will be implemented in future
"TacticalRMM_sync",
"TacticalRMM_agentupdate",
)
for task in r:
@@ -250,16 +230,16 @@ def remove_orphaned_win_tasks(agentpk):
if task.startswith("TacticalRMM_") and task not in agent_task_names:
# delete task since it doesn't exist in UI
ret = agent.salt_api_cmd(
timeout=20,
func="task.delete_task",
arg=[f"name={task}"],
)
if isinstance(ret, bool) and ret is True:
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
else:
nats_data = {
"func": "delschedtask",
"schedtaskpayload": {"name": task},
}
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if ret != "ok":
logger.error(
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
)
else:
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")

View File

@@ -1,3 +1,4 @@
import datetime as dt
from unittest.mock import patch, call
from model_bakery import baker
from django.utils import timezone as djangotime
@@ -25,9 +26,9 @@ class TestAutotaskViews(TacticalTestCase):
# setup data
script = baker.make_recipe("scripts.script")
agent = baker.make_recipe("agents.agent")
agent_old = baker.make_recipe("agents.agent", version="0.9.0")
policy = baker.make("automation.Policy")
check = baker.make_recipe("checks.diskspace_check", agent=agent)
old_agent = baker.make_recipe("agents.agent", version="1.1.0")
# test script set to invalid pk
data = {"autotask": {"script": 500}}
@@ -50,10 +51,10 @@ class TestAutotaskViews(TacticalTestCase):
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 404)
# test invalid agent version
# test old agent version
data = {
"autotask": {"script": script.id, "script_args": ["args"]},
"agent": agent_old.id,
"autotask": {"script": script.id},
"agent": old_agent.id,
}
resp = self.client.post(url, data, format="json")
@@ -63,7 +64,7 @@ class TestAutotaskViews(TacticalTestCase):
data = {
"autotask": {
"name": "Test Task Scheduled with Assigned Check",
"run_time_days": [0, 1, 2],
"run_time_days": ["Sunday", "Monday", "Friday"],
"run_time_minute": "10:00",
"timeout": 120,
"enabled": True,
@@ -84,6 +85,7 @@ class TestAutotaskViews(TacticalTestCase):
data = {
"autotask": {
"name": "Test Task Manual",
"run_time_days": [],
"timeout": 120,
"enabled": True,
"script": script.id,
@@ -213,8 +215,8 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
self.authenticate()
self.setup_coresettings()
@patch("agents.models.Agent.salt_api_cmd")
def test_remove_orphaned_win_task(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_remove_orphaned_win_task(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
@@ -222,20 +224,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
win_task_name=AutomatedTask.generate_task_name(),
)
salt_api_cmd.return_value = "timeout"
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(ret.result, "errtimeout")
salt_api_cmd.return_value = "error"
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(ret.result, "errtimeout")
salt_api_cmd.return_value = "task not found in"
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(ret.result, "notlist")
salt_api_cmd.reset_mock()
# test removing an orphaned task
win_tasks = [
"Adobe Acrobat Update Task",
@@ -250,50 +238,54 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
]
self.calls = [
call(timeout=15, func="task.list_tasks"),
call({"func": "listschedtasks"}, timeout=10),
call(
timeout=20,
func="task.delete_task",
arg=["name=TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"],
{
"func": "delschedtask",
"schedtaskpayload": {
"name": "TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"
},
},
timeout=10,
),
]
salt_api_cmd.side_effect = [win_tasks, True]
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(salt_api_cmd.call_count, 2)
salt_api_cmd.assert_has_calls(self.calls)
self.assertEqual(nats_cmd.call_count, 2)
nats_cmd.assert_has_calls(self.calls)
self.assertEqual(ret.status, "SUCCESS")
# test salt delete_task fail
salt_api_cmd.reset_mock()
salt_api_cmd.side_effect = [win_tasks, False]
# test nats delete task fail
nats_cmd.reset_mock()
nats_cmd.side_effect = [win_tasks, "error deleting task"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
salt_api_cmd.assert_has_calls(self.calls)
self.assertEqual(salt_api_cmd.call_count, 2)
nats_cmd.assert_has_calls(self.calls)
self.assertEqual(nats_cmd.call_count, 2)
self.assertEqual(ret.status, "SUCCESS")
# no orphaned tasks
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb")
salt_api_cmd.side_effect = [win_tasks, True]
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(salt_api_cmd.call_count, 1)
self.assertEqual(nats_cmd.call_count, 1)
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.salt_api_async")
def test_run_win_task(self, salt_api_async):
@patch("agents.models.Agent.nats_cmd")
def test_run_win_task(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 1",
win_task_name=AutomatedTask.generate_task_name(),
)
salt_api_async.return_value = "Response 200"
nats_cmd.return_value = "ok"
ret = run_win_task.s(self.task1.pk).apply()
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.salt_api_cmd")
def test_create_win_task_schedule(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_create_win_task_schedule(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
task_name = AutomatedTask.generate_task_name()
@@ -303,46 +295,32 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
name="test task 1",
win_task_name=task_name,
task_type="scheduled",
run_time_days=[0, 1, 6],
run_time_bit_weekdays=127,
run_time_minute="21:55",
)
self.assertEqual(self.task1.sync_status, "notsynced")
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(salt_api_cmd.call_count, 1)
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task1.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Weekly",
'start_time="21:55"',
"ac_only=False",
"stop_if_on_batteries=False",
],
kwargs={"days_of_week": ["Monday", "Tuesday", "Sunday"]},
self.assertEqual(nats_cmd.call_count, 1)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "weekly",
"weekdays": 127,
"pk": self.task1.pk,
"name": task_name,
"hour": 21,
"min": 55,
},
},
timeout=10,
)
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "synced")
salt_api_cmd.return_value = "timeout"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(ret.status, "SUCCESS")
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "notsynced")
salt_api_cmd.return_value = "error"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(ret.status, "SUCCESS")
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "notsynced")
salt_api_cmd.return_value = False
nats_cmd.return_value = "timeout"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(ret.status, "SUCCESS")
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
@@ -353,7 +331,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
agent=self.agent, action_type="taskaction"
)
self.assertEqual(self.pending_action.status, "pending")
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(
pk=self.task1.pk, pending_action=self.pending_action.pk
).apply()
@@ -362,7 +340,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
self.assertEqual(self.pending_action.status, "completed")
# test runonce with future date
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
run_time_date = djangotime.now() + djangotime.timedelta(hours=22)
self.task2 = AutomatedTask.objects.create(
@@ -372,30 +350,29 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
task_type="runonce",
run_time_date=run_time_date,
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task2.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
f'start_date="{run_time_date.strftime("%Y-%m-%d")}"',
f'start_time="{run_time_date.strftime("%H:%M")}"',
"ac_only=False",
"stop_if_on_batteries=False",
"start_when_available=True",
],
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "once",
"pk": self.task2.pk,
"name": task_name,
"year": int(dt.datetime.strftime(self.task2.run_time_date, "%Y")),
"month": dt.datetime.strftime(self.task2.run_time_date, "%B"),
"day": int(dt.datetime.strftime(self.task2.run_time_date, "%d")),
"hour": int(dt.datetime.strftime(self.task2.run_time_date, "%H")),
"min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")
# test runonce with date in the past
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
run_time_date = djangotime.now() - djangotime.timedelta(days=13)
self.task3 = AutomatedTask.objects.create(
@@ -405,31 +382,13 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
task_type="runonce",
run_time_date=run_time_date,
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task3.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
f'start_date="{self.task3.run_time_date.strftime("%Y-%m-%d")}"',
f'start_time="{self.task3.run_time_date.strftime("%H:%M")}"',
"ac_only=False",
"stop_if_on_batteries=False",
"start_when_available=True",
],
)
self.assertEqual(ret.status, "SUCCESS")
# test checkfailure
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
self.check = baker.make_recipe("checks.diskspace_check", agent=self.agent)
task_name = AutomatedTask.generate_task_name()
self.task4 = AutomatedTask.objects.create(
@@ -439,29 +398,24 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
task_type="checkfailure",
assigned_check=self.check,
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task4.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
'start_date="1975-01-01"',
'start_time="01:00"',
"ac_only=False",
"stop_if_on_batteries=False",
],
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": self.task4.pk,
"name": task_name,
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")
# test manual
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
self.task5 = AutomatedTask.objects.create(
agent=self.agent,
@@ -469,23 +423,18 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
win_task_name=task_name,
task_type="manual",
)
salt_api_cmd.return_value = True
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
salt_api_cmd.assert_called_with(
timeout=20,
func="task.create_task",
arg=[
f"name={task_name}",
"force=True",
"action_type=Execute",
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
f'arguments="-m taskrunner -p {self.task5.pk}"',
"start_in=C:\\Program Files\\TacticalAgent",
"trigger_type=Once",
'start_date="1975-01-01"',
'start_time="01:00"',
"ac_only=False",
"stop_if_on_batteries=False",
],
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": self.task5.pk,
"name": task_name,
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")

View File

@@ -20,7 +20,7 @@ from .tasks import (
delete_win_task_schedule,
enable_or_disable_win_task,
)
from tacticalrmm.utils import notify_error
from tacticalrmm.utils import notify_error, get_bit_days
class AddAutoTask(APIView):
@@ -38,17 +38,20 @@ class AddAutoTask(APIView):
parent = {"policy": policy}
else:
agent = get_object_or_404(Agent, pk=data["agent"])
if not agent.has_gotasks:
return notify_error("Requires agent version 1.1.1 or greater")
parent = {"agent": agent}
added = "0.11.0"
if data["autotask"]["script_args"] and agent.not_supported(added):
return notify_error(
f"Script arguments only available in agent {added} or greater"
)
check = None
if data["autotask"]["assigned_check"]:
check = get_object_or_404(Check, pk=data["autotask"]["assigned_check"])
bit_weekdays = None
if data["autotask"]["run_time_days"]:
bit_weekdays = get_bit_days(data["autotask"]["run_time_days"])
del data["autotask"]["run_time_days"]
serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
serializer.is_valid(raise_exception=True)
obj = serializer.save(
@@ -56,6 +59,7 @@ class AddAutoTask(APIView):
script=script,
win_task_name=AutomatedTask.generate_task_name(),
assigned_check=check,
run_time_bit_weekdays=bit_weekdays,
)
if not "policy" in data:

View File

@@ -1,5 +1,6 @@
from django.contrib import admin
from .models import Check
from .models import Check, CheckHistory
admin.site.register(Check)
admin.site.register(CheckHistory)

View File

@@ -0,0 +1,30 @@
# Generated by Django 3.1.4 on 2021-01-09 02:56
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("checks", "0010_auto_20200922_1344"),
]
operations = [
migrations.AddField(
model_name="check",
name="run_history",
field=django.contrib.postgres.fields.ArrayField(
base_field=django.contrib.postgres.fields.ArrayField(
base_field=models.PositiveIntegerField(),
blank=True,
null=True,
size=None,
),
blank=True,
default=list,
null=True,
size=None,
),
),
]

View File

@@ -0,0 +1,39 @@
# Generated by Django 3.1.4 on 2021-01-09 21:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("checks", "0010_auto_20200922_1344"),
]
operations = [
migrations.CreateModel(
name="CheckHistory",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("x", models.DateTimeField()),
("y", models.PositiveIntegerField()),
("results", models.JSONField(blank=True, null=True)),
(
"check_history",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="check_history",
to="checks.check",
),
),
],
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-10 05:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("checks", "0011_checkhistory"),
]
operations = [
migrations.AlterField(
model_name="checkhistory",
name="y",
field=models.PositiveIntegerField(blank=True, null=True),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-10 05:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("checks", "0012_auto_20210110_0503"),
]
operations = [
migrations.AlterField(
model_name="checkhistory",
name="y",
field=models.PositiveIntegerField(null=True),
),
]

View File

@@ -0,0 +1,13 @@
# Generated by Django 3.1.4 on 2021-01-10 18:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("checks", "0013_auto_20210110_0505"),
("checks", "0011_check_run_history"),
]
operations = []

View File

@@ -0,0 +1,27 @@
# Generated by Django 3.1.4 on 2021-01-10 18:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("checks", "0014_merge_20210110_1808"),
]
operations = [
migrations.RemoveField(
model_name="check",
name="run_history",
),
migrations.AlterField(
model_name="checkhistory",
name="x",
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name="checkhistory",
name="y",
field=models.PositiveIntegerField(blank=True, default=None, null=True),
),
]

View File

@@ -3,12 +3,13 @@ import string
import os
import json
import pytz
from statistics import mean
from statistics import mean, mode
from django.db import models
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.validators import MinValueValidator, MaxValueValidator
from rest_framework.fields import JSONField
from core.models import CoreSettings
from logs.models import BaseAuditModel
@@ -214,6 +215,9 @@ class Check(BaseAuditModel):
"modified_time",
]
def add_check_history(self, value, more_info=None):
CheckHistory.objects.create(check_history=self, y=value, results=more_info)
def handle_checkv2(self, data):
# cpuload or mem checks
if self.check_type == "cpuload" or self.check_type == "memory":
@@ -232,6 +236,9 @@ class Check(BaseAuditModel):
else:
self.status = "passing"
# add check history
self.add_check_history(data["percent"])
# diskspace checks
elif self.check_type == "diskspace":
if data["exists"]:
@@ -245,6 +252,9 @@ class Check(BaseAuditModel):
self.status = "passing"
self.more_info = f"Total: {total}B, Free: {free}B"
# add check history
self.add_check_history(percent_used)
else:
self.status = "failing"
self.more_info = f"Disk {self.disk} does not exist"
@@ -277,6 +287,17 @@ class Check(BaseAuditModel):
]
)
# add check history
self.add_check_history(
1 if self.status == "failing" else 0,
{
"retcode": data["retcode"],
"stdout": data["stdout"][:60],
"stderr": data["stderr"][:60],
"execution_time": self.execution_time,
},
)
# ping checks
elif self.check_type == "ping":
success = ["Reply", "bytes", "time", "TTL"]
@@ -293,6 +314,10 @@ class Check(BaseAuditModel):
self.more_info = output
self.save(update_fields=["more_info"])
self.add_check_history(
1 if self.status == "failing" else 0, self.more_info[:60]
)
# windows service checks
elif self.check_type == "winsvc":
svc_stat = data["status"]
@@ -332,6 +357,10 @@ class Check(BaseAuditModel):
self.save(update_fields=["more_info"])
self.add_check_history(
1 if self.status == "failing" else 0, self.more_info[:60]
)
elif self.check_type == "eventlog":
log = []
is_wildcard = self.event_id_is_wildcard
@@ -391,6 +420,11 @@ class Check(BaseAuditModel):
self.extra_details = {"log": log}
self.save(update_fields=["extra_details"])
self.add_check_history(
1 if self.status == "failing" else 0,
"Events Found:" + str(len(self.extra_details["log"])),
)
# handle status
if self.status == "failing":
self.fail_count += 1
@@ -411,42 +445,6 @@ class Check(BaseAuditModel):
return self.status
def handle_check(self, data):
if self.check_type != "cpuload" and self.check_type != "memory":
if data["status"] == "passing" and self.fail_count != 0:
self.fail_count = 0
self.save(update_fields=["fail_count"])
elif data["status"] == "failing":
self.fail_count += 1
self.save(update_fields=["fail_count"])
else:
self.history.append(data["percent"])
if len(self.history) > 15:
self.history = self.history[-15:]
self.save(update_fields=["history"])
avg = int(mean(self.history))
if avg > self.threshold:
self.status = "failing"
self.fail_count += 1
self.save(update_fields=["status", "fail_count"])
else:
self.status = "passing"
if self.fail_count != 0:
self.fail_count = 0
self.save(update_fields=["status", "fail_count"])
else:
self.save(update_fields=["status"])
if self.email_alert and self.fail_count >= self.fails_b4_alert:
handle_check_email_alert_task.delay(self.pk)
@staticmethod
def serialize(check):
# serializes the check and returns json
@@ -645,3 +643,17 @@ class Check(BaseAuditModel):
body = subject
CORE.send_sms(body)
class CheckHistory(models.Model):
check_history = models.ForeignKey(
Check,
related_name="check_history",
on_delete=models.CASCADE,
)
x = models.DateTimeField(auto_now_add=True)
y = models.PositiveIntegerField(null=True, blank=True, default=None)
results = models.JSONField(null=True, blank=True)
def __str__(self):
return self.check_history.readable_desc
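Note: a short usage sketch of the new CheckHistory model and add_check_history helper, based on the calls added above. y stores the measured percentage for cpuload/memory/diskspace checks and a 0/1 pass-fail flag for the other check types, with optional details in results; the pk and values below are illustrative only.
from checks.models import Check

check = Check.objects.get(pk=1)  # assumes such a check exists

# cpuload / memory / diskspace checks record the raw percentage
check.add_check_history(87)

# script / ping / winsvc / eventlog checks record 0 (passing) or 1 (failing)
check.add_check_history(
    1, {"retcode": 2, "stdout": "boom", "stderr": "", "execution_time": "1.2"}
)

# the FK uses related_name="check_history", so history reads back the same way
# the CheckHistory view further below filters it
latest = check.check_history.order_by("-x").first()
print(latest.y, latest.results)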

View File

@@ -1,8 +1,8 @@
import validators as _v
import pytz
from rest_framework import serializers
from .models import Check
from .models import Check, CheckHistory
from autotasks.models import AutomatedTask
from scripts.serializers import ScriptSerializer, ScriptCheckSerializer
@@ -65,6 +65,26 @@ class CheckSerializer(serializers.ModelSerializer):
"Please enter a valid IP address or domain name"
)
if check_type == "cpuload" and not self.instance:
if (
Check.objects.filter(**self.context, check_type="cpuload")
.exclude(managed_by_policy=True)
.exists()
):
raise serializers.ValidationError(
"A cpuload check for this agent already exists"
)
if check_type == "memory" and not self.instance:
if (
Check.objects.filter(**self.context, check_type="memory")
.exclude(managed_by_policy=True)
.exists()
):
raise serializers.ValidationError(
"A memory check for this agent already exists"
)
return val
@@ -75,101 +95,7 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):
class CheckRunnerGetSerializer(serializers.ModelSerializer):
# for the windows agent
# only send data needed for agent to run a check
assigned_task = serializers.SerializerMethodField()
script = ScriptSerializer(read_only=True)
def get_assigned_task(self, obj):
if obj.assignedtask.exists():
# this will not break agents on version 0.10.2 or lower
# newer agents once released will properly handle multiple tasks assigned to a check
task = obj.assignedtask.first()
return AssignedTaskCheckRunnerField(task).data
class Meta:
model = Check
exclude = [
"policy",
"managed_by_policy",
"overriden_by_policy",
"parent_check",
"name",
"more_info",
"last_run",
"email_alert",
"text_alert",
"fails_b4_alert",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
"stderr",
"retcode",
"execution_time",
"svc_display_name",
"svc_policy_mode",
"created_by",
"created_time",
"modified_by",
"modified_time",
"history",
]
class CheckRunnerGetSerializerV2(serializers.ModelSerializer):
# for the windows __python__ agent
# only send data needed for agent to run a check
assigned_tasks = serializers.SerializerMethodField()
script = ScriptSerializer(read_only=True)
def get_assigned_tasks(self, obj):
if obj.assignedtask.exists():
tasks = obj.assignedtask.all()
return AssignedTaskCheckRunnerField(tasks, many=True).data
class Meta:
model = Check
exclude = [
"policy",
"managed_by_policy",
"overriden_by_policy",
"parent_check",
"name",
"more_info",
"last_run",
"email_alert",
"text_alert",
"fails_b4_alert",
"fail_count",
"email_sent",
"text_sent",
"outage_history",
"extra_details",
"stdout",
"stderr",
"retcode",
"execution_time",
"svc_display_name",
"svc_policy_mode",
"created_by",
"created_time",
"modified_by",
"modified_time",
"history",
]
class CheckRunnerGetSerializerV3(serializers.ModelSerializer):
# for the windows __golang__ agent
# only send data needed for agent to run a check
# the difference here is in the script serializer
# script checks no longer rely on salt and are executed directly by the go agent
assigned_tasks = serializers.SerializerMethodField()
script = ScriptCheckSerializer(read_only=True)
@@ -217,3 +143,15 @@ class CheckResultsSerializer(serializers.ModelSerializer):
class Meta:
model = Check
fields = "__all__"
class CheckHistorySerializer(serializers.ModelSerializer):
x = serializers.SerializerMethodField()
def get_x(self, obj):
return obj.x.astimezone(pytz.timezone(self.context["timezone"])).isoformat()
# used for returning large amounts of graph data
class Meta:
model = CheckHistory
fields = ("x", "y", "results")

View File

@@ -5,8 +5,6 @@ from time import sleep
from tacticalrmm.celery import app
from django.utils import timezone as djangotime
from agents.models import Agent
@app.task
def handle_check_email_alert_task(pk):
@@ -56,3 +54,15 @@ def handle_check_sms_alert_task(pk):
check.save(update_fields=["text_sent"])
return "ok"
@app.task
def prune_check_history(older_than_days: int) -> str:
from .models import CheckHistory
CheckHistory.objects.filter(
x__lt=djangotime.make_aware(dt.datetime.today())
- djangotime.timedelta(days=older_than_days)
).delete()
return "ok"

View File

@@ -1,13 +1,16 @@
from checks.models import CheckHistory
from tacticalrmm.test import TacticalTestCase
from .serializers import CheckSerializer
from django.utils import timezone as djangotime
from unittest.mock import patch
from model_bakery import baker
from itertools import cycle
class TestCheckViews(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
def test_get_disk_check(self):
# setup data
@@ -55,6 +58,52 @@ class TestCheckViews(TacticalTestCase):
resp = self.client.post(url, invalid_payload, format="json")
self.assertEqual(resp.status_code, 400)
def test_add_cpuload_check(self):
url = "/checks/checks/"
agent = baker.make_recipe("agents.agent")
payload = {
"pk": agent.pk,
"check": {
"check_type": "cpuload",
"threshold": 66,
"fails_b4_alert": 9,
},
}
resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 200)
payload["threshold"] = 87
resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 400)
self.assertEqual(
resp.json()["non_field_errors"][0],
"A cpuload check for this agent already exists",
)
def test_add_memory_check(self):
url = "/checks/checks/"
agent = baker.make_recipe("agents.agent")
payload = {
"pk": agent.pk,
"check": {
"check_type": "memory",
"threshold": 78,
"fails_b4_alert": 1,
},
}
resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 200)
payload["threshold"] = 55
resp = self.client.post(url, payload, format="json")
self.assertEqual(resp.status_code, 400)
self.assertEqual(
resp.json()["non_field_errors"][0],
"A memory check for this agent already exists",
)
def test_get_policy_disk_check(self):
# setup data
policy = baker.make("automation.Policy")
@@ -134,3 +183,111 @@ class TestCheckViews(TacticalTestCase):
self.assertEqual(resp.status_code, 200)
self.check_not_authenticated("patch", url_a)
@patch("agents.models.Agent.nats_cmd")
def test_run_checks(self, nats_cmd):
agent = baker.make_recipe("agents.agent", version="1.4.1")
agent_old = baker.make_recipe("agents.agent", version="1.0.2")
agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0")
url = f"/checks/runchecks/{agent_old.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater")
url = f"/checks/runchecks/{agent_b4_141.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with({"func": "runchecks"}, wait=False)
nats_cmd.reset_mock()
nats_cmd.return_value = "busy"
url = f"/checks/runchecks/{agent.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
self.assertEqual(r.json(), f"Checks are already running on {agent.hostname}")
nats_cmd.reset_mock()
nats_cmd.return_value = "ok"
url = f"/checks/runchecks/{agent.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
self.assertEqual(r.json(), f"Checks will now be re-run on {agent.hostname}")
nats_cmd.reset_mock()
nats_cmd.return_value = "timeout"
url = f"/checks/runchecks/{agent.pk}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
self.assertEqual(r.json(), "Unable to contact the agent")
self.check_not_authenticated("get", url)
def test_get_check_history(self):
# setup data
agent = baker.make_recipe("agents.agent")
check = baker.make_recipe("checks.diskspace_check", agent=agent)
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
check_history_data = baker.make(
"checks.CheckHistory",
check_history=check,
_quantity=30,
)
# need to manually set the date back 35 days
for check_history in check_history_data:
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
check_history.save()
# test invalid check pk
resp = self.client.patch("/checks/history/500/", format="json")
self.assertEqual(resp.status_code, 404)
url = f"/checks/history/{check.id}/"
# test with timeFilter last 30 days
data = {"timeFilter": 30}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 30)
# test with timeFilter equal to 0
data = {"timeFilter": 0}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 60)
self.check_not_authenticated("patch", url)
class TestCheckTasks(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
def test_prune_check_history(self):
from .tasks import prune_check_history
# setup data
check = baker.make_recipe("checks.diskspace_check")
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
check_history_data = baker.make(
"checks.CheckHistory",
check_history=check,
_quantity=30,
)
# need to manually set the date back 35 days
for check_history in check_history_data:
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
check_history.save()
# prune data 30 days old
prune_check_history(30)
self.assertEqual(CheckHistory.objects.count(), 30)
# prune all Check history Data
prune_check_history(0)
self.assertEqual(CheckHistory.objects.count(), 0)

View File

@@ -7,4 +7,5 @@ urlpatterns = [
path("<pk>/loadchecks/", views.load_checks),
path("getalldisks/", views.get_disks_for_policies),
path("runchecks/<pk>/", views.run_checks),
path("history/<int:checkpk>/", views.CheckHistory.as_view()),
]

View File

@@ -1,6 +1,11 @@
import asyncio
from packaging import version as pyver
from django.shortcuts import get_object_or_404
from django.db.models import Q
from django.utils import timezone as djangotime
from datetime import datetime as dt
from rest_framework.views import APIView
from rest_framework.response import Response
@@ -13,7 +18,7 @@ from automation.models import Policy
from .models import Check
from scripts.models import Script
from .serializers import CheckSerializer
from .serializers import CheckSerializer, CheckHistorySerializer
from automation.tasks import (
@@ -36,17 +41,6 @@ class AddCheck(APIView):
else:
agent = get_object_or_404(Agent, pk=request.data["pk"])
parent = {"agent": agent}
added = "0.11.0"
if (
request.data["check"]["check_type"] == "script"
and request.data["check"]["script_args"]
and agent.not_supported(version_added=added)
):
return notify_error(
{
"non_field_errors": f"Script arguments only available in agent {added} or greater"
}
)
script = None
if "script" in request.data["check"]:
@@ -58,13 +52,6 @@ class AddCheck(APIView):
request.data["check"]["check_type"] == "eventlog"
and request.data["check"]["event_id_is_wildcard"]
):
if agent and agent.not_supported(version_added="0.10.2"):
return notify_error(
{
"non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
}
)
request.data["check"]["event_id"] = 0
serializer = CheckSerializer(
@@ -116,31 +103,8 @@ class GetUpdateDeleteCheck(APIView):
pass
else:
if request.data["event_id_is_wildcard"]:
if check.agent.not_supported(version_added="0.10.2"):
return notify_error(
{
"non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
}
)
request.data["event_id"] = 0
elif check.check_type == "script":
added = "0.11.0"
try:
request.data["script_args"]
except KeyError:
pass
else:
if request.data["script_args"] and check.agent.not_supported(
version_added=added
):
return notify_error(
{
"non_field_errors": f"Script arguments only available in agent {added} or greater"
}
)
serializer = CheckSerializer(instance=check, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
@@ -176,14 +140,46 @@ class GetUpdateDeleteCheck(APIView):
return Response(f"{check.readable_desc} was deleted!")
class CheckHistory(APIView):
def patch(self, request, checkpk):
check = get_object_or_404(Check, pk=checkpk)
timeFilter = Q()
if "timeFilter" in request.data:
if request.data["timeFilter"] != 0:
timeFilter = Q(
x__lte=djangotime.make_aware(dt.today()),
x__gt=djangotime.make_aware(dt.today())
- djangotime.timedelta(days=request.data["timeFilter"]),
)
check_history = check.check_history.filter(timeFilter).order_by("-x")
return Response(
CheckHistorySerializer(
check_history, context={"timezone": check.agent.timezone}, many=True
).data
)
@api_view()
def run_checks(request, pk):
agent = get_object_or_404(Agent, pk=pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
return Response(agent.hostname)
if pyver.parse(agent.version) >= pyver.parse("1.4.1"):
r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15))
if r == "busy":
return notify_error(f"Checks are already running on {agent.hostname}")
elif r == "ok":
return Response(f"Checks will now be re-run on {agent.hostname}")
else:
return notify_error("Unable to contact the agent")
else:
asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
return Response(f"Checks will now be re-run on {agent.hostname}")
@api_view()

View File

@@ -6,48 +6,48 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('clients', '0004_auto_20200821_2115'),
("clients", "0004_auto_20200821_2115"),
]
operations = [
migrations.AddField(
model_name='client',
name='created_by',
model_name="client",
name="created_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='client',
name='created_time',
model_name="client",
name="created_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='client',
name='modified_by',
model_name="client",
name="modified_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='client',
name='modified_time',
model_name="client",
name="modified_time",
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='site',
name='created_by',
model_name="site",
name="created_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='site',
name='created_time',
model_name="site",
name="created_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='site',
name='modified_by',
model_name="site",
name="modified_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='site',
name='modified_time',
model_name="site",
name="modified_time",
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -8,24 +8,67 @@ import uuid
class Migration(migrations.Migration):
dependencies = [
('knox', '0007_auto_20190111_0542'),
('clients', '0005_auto_20200922_1344'),
("knox", "0007_auto_20190111_0542"),
("clients", "0005_auto_20200922_1344"),
]
operations = [
migrations.CreateModel(
name='Deployment',
name="Deployment",
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uid', models.UUIDField(default=uuid.uuid4, editable=False)),
('mon_type', models.CharField(choices=[('server', 'Server'), ('workstation', 'Workstation')], default='server', max_length=255)),
('arch', models.CharField(choices=[('64', '64 bit'), ('32', '32 bit')], default='64', max_length=255)),
('expiry', models.DateTimeField(blank=True, null=True)),
('token_key', models.CharField(max_length=255)),
('install_flags', models.JSONField(blank=True, null=True)),
('auth_token', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploytokens', to='knox.authtoken')),
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deployclients', to='clients.client')),
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploysites', to='clients.site')),
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("uid", models.UUIDField(default=uuid.uuid4, editable=False)),
(
"mon_type",
models.CharField(
choices=[("server", "Server"), ("workstation", "Workstation")],
default="server",
max_length=255,
),
),
(
"arch",
models.CharField(
choices=[("64", "64 bit"), ("32", "32 bit")],
default="64",
max_length=255,
),
),
("expiry", models.DateTimeField(blank=True, null=True)),
("token_key", models.CharField(max_length=255)),
("install_flags", models.JSONField(blank=True, null=True)),
(
"auth_token",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="deploytokens",
to="knox.authtoken",
),
),
(
"client",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="deployclients",
to="clients.client",
),
),
(
"site",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="deploysites",
to="clients.site",
),
),
],
),
]

View File

@@ -6,18 +6,18 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('clients', '0006_deployment'),
("clients", "0006_deployment"),
]
operations = [
migrations.RenameField(
model_name='client',
old_name='client',
new_name='name',
model_name="client",
old_name="client",
new_name="name",
),
migrations.RenameField(
model_name='site',
old_name='site',
new_name='name',
model_name="site",
old_name="site",
new_name="name",
),
]

View File

@@ -6,16 +6,16 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('clients', '0007_auto_20201102_1920'),
("clients", "0007_auto_20201102_1920"),
]
operations = [
migrations.AlterModelOptions(
name='client',
options={'ordering': ('name',)},
name="client",
options={"ordering": ("name",)},
),
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',)},
name="site",
options={"ordering": ("name",)},
),
]

View File

@@ -38,7 +38,6 @@ class Client(BaseAuditModel):
@property
def has_failing_checks(self):
agents = (
Agent.objects.only(
"pk",
@@ -50,14 +49,17 @@ class Client(BaseAuditModel):
.filter(site__client=self)
.prefetch_related("agentchecks")
)
failing = 0
for agent in agents:
if agent.checks["has_failing_checks"]:
return True
failing += 1
if agent.overdue_email_alert or agent.overdue_text_alert:
return agent.status == "overdue"
if agent.status == "overdue":
failing += 1
return False
return failing > 0
@staticmethod
def serialize(client):
@@ -98,7 +100,6 @@ class Site(BaseAuditModel):
@property
def has_failing_checks(self):
agents = (
Agent.objects.only(
"pk",
@@ -110,14 +111,17 @@ class Site(BaseAuditModel):
.filter(site=self)
.prefetch_related("agentchecks")
)
failing = 0
for agent in agents:
if agent.checks["has_failing_checks"]:
return True
failing += 1
if agent.overdue_email_alert or agent.overdue_text_alert:
return agent.status == "overdue"
if agent.status == "overdue":
failing += 1
return False
return failing > 0
@staticmethod
def serialize(site):
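Note: the two hunks above interleave the old early-return logic with its replacement; reconstructed as a sketch, the new property (same shape for Client and Site, differing only in the agent filter) counts an agent as failing when it has failing checks, or when overdue alerting is enabled and the agent is overdue, and flags the client/site when that count is non-zero.
def has_failing_checks(agents) -> bool:
    # sketch of the new property body shared by Client and Site
    failing = 0
    for agent in agents:
        if agent.checks["has_failing_checks"]:
            failing += 1
        # only count an overdue agent when overdue alerting is enabled for it
        if agent.overdue_email_alert or agent.overdue_text_alert:
            if agent.status == "overdue":
                failing += 1
    return failing > 0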

View File

@@ -192,7 +192,7 @@ class GenerateAgent(APIView):
if not os.path.exists(go_bin):
return notify_error("Missing golang")
api = f"{request.scheme}://{request.get_host()}"
api = f"https://{request.get_host()}"
inno = (
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
if d.arch == "64"
@@ -223,7 +223,7 @@ class GenerateAgent(APIView):
f"GOARCH={goarch}",
go_bin,
"build",
f"-ldflags=\"-X 'main.Inno={inno}'",
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
f"-X 'main.Api={api}'",
f"-X 'main.Client={d.client.pk}'",
f"-X 'main.Site={d.site.pk}'",
@@ -282,4 +282,4 @@ class GenerateAgent(APIView):
response = HttpResponse()
response["Content-Disposition"] = f"attachment; filename={file_name}"
response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
return response
return response

View File

@@ -56,8 +56,8 @@ func downloadAgent(filepath string) (err error) {
func main() {
debugLog := flag.String("log", "", "Verbose output")
localSalt := flag.String("local-salt", "", "Use local salt minion")
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
cert := flag.String("cert", "", "Path to ca.pem")
timeout := flag.String("timeout", "", "Timeout for subprocess calls")
flag.Parse()
@@ -78,35 +78,35 @@ func main() {
}
if debug {
cmdArgs = append(cmdArgs, "--log", "DEBUG")
cmdArgs = append(cmdArgs, "-log", "debug")
}
if len(strings.TrimSpace(*localSalt)) != 0 {
cmdArgs = append(cmdArgs, "--local-salt", *localSalt)
if *silent {
cmdArgs = append(cmdArgs, "-silent")
}
if len(strings.TrimSpace(*localMesh)) != 0 {
cmdArgs = append(cmdArgs, "--local-mesh", *localMesh)
cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
}
if len(strings.TrimSpace(*cert)) != 0 {
cmdArgs = append(cmdArgs, "--cert", *cert)
cmdArgs = append(cmdArgs, "-cert", *cert)
}
if len(strings.TrimSpace(*timeout)) != 0 {
cmdArgs = append(cmdArgs, "--timeout", *timeout)
cmdArgs = append(cmdArgs, "-timeout", *timeout)
}
if Rdp == "1" {
cmdArgs = append(cmdArgs, "--rdp")
cmdArgs = append(cmdArgs, "-rdp")
}
if Ping == "1" {
cmdArgs = append(cmdArgs, "--ping")
cmdArgs = append(cmdArgs, "-ping")
}
if Power == "1" {
cmdArgs = append(cmdArgs, "--power")
cmdArgs = append(cmdArgs, "-power")
}
if debug {
@@ -133,7 +133,7 @@ func main() {
os.Exit(1)
}
time.Sleep(20 * time.Second)
time.Sleep(10 * time.Second)
fmt.Println("Installation starting.")
cmd := exec.Command(tacrmm, cmdArgs...)

View File

@@ -36,7 +36,7 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
write-host ('Extracting...')
Start-Sleep -s 20
Start-Sleep -s 10
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
exit 0
}

View File

@@ -11,12 +11,11 @@ class Command(BaseCommand):
help = "Sets up initial mesh central configuration"
async def websocket_call(self, mesh_settings):
token = get_auth_token(
mesh_settings.mesh_username, mesh_settings.mesh_token
)
token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)
if settings.MESH_WS_URL:
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
if settings.DOCKER_BUILD:
site = mesh_settings.mesh_site.replace("https", "ws")
uri = f"{site}:443/control.ashx?auth={token}"
else:
site = mesh_settings.mesh_site.replace("https", "wss")
uri = f"{site}/control.ashx?auth={token}"

View File

@@ -12,12 +12,11 @@ class Command(BaseCommand):
async def websocket_call(self, mesh_settings):
token = get_auth_token(
mesh_settings.mesh_username, mesh_settings.mesh_token
)
token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)
if settings.MESH_WS_URL:
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
if settings.DOCKER_BUILD:
site = mesh_settings.mesh_site.replace("https", "ws")
uri = f"{site}:443/control.ashx?auth={token}"
else:
site = mesh_settings.mesh_site.replace("https", "wss")
uri = f"{site}/control.ashx?auth={token}"
@@ -52,11 +51,17 @@ class Command(BaseCommand):
try:
# Check for Mesh Username
if not mesh_settings.mesh_username or settings.MESH_USERNAME != mesh_settings.mesh_username:
if (
not mesh_settings.mesh_username
or settings.MESH_USERNAME != mesh_settings.mesh_username
):
mesh_settings.mesh_username = settings.MESH_USERNAME
# Check for Mesh Site
if not mesh_settings.mesh_site or settings.MESH_SITE != mesh_settings.mesh_site:
if (
not mesh_settings.mesh_site
or settings.MESH_SITE != mesh_settings.mesh_site
):
mesh_settings.mesh_site = settings.MESH_SITE
# Check for Mesh Token
@@ -75,7 +80,9 @@ class Command(BaseCommand):
return
try:
asyncio.get_event_loop().run_until_complete(self.websocket_call(mesh_settings))
asyncio.get_event_loop().run_until_complete(
self.websocket_call(mesh_settings)
)
self.stdout.write("Initial Mesh Central setup complete")
except websockets.exceptions.ConnectionClosedError:
self.stdout.write(

View File

@@ -1,9 +1,7 @@
import os
import shutil
import subprocess
import sys
import tempfile
from time import sleep
from django.core.management.base import BaseCommand
@@ -15,22 +13,10 @@ class Command(BaseCommand):
help = "Collection of tasks to run after updating the rmm, after migrations"
def handle(self, *args, **kwargs):
if not os.path.exists("/usr/local/bin/goversioninfo"):
self.stdout.write(self.style.ERROR("*" * 100))
self.stdout.write("\n")
self.stdout.write(
self.style.ERROR(
"ERROR: New update script available. Delete this one and re-download."
)
)
self.stdout.write("\n")
sys.exit(1)
# 10-16-2020 changed the type of the agent's 'disks' model field
# from a dict of dicts, to a list of disks in the golang agent
# the following will convert dicts to lists for agent's still on the python agent
agents = Agent.objects.all()
agents = Agent.objects.only("pk", "disks")
for agent in agents:
if agent.disks is not None and isinstance(agent.disks, dict):
new = []
@@ -43,88 +29,17 @@ class Command(BaseCommand):
self.style.SUCCESS(f"Migrated disks on {agent.hostname}")
)
# sync modules. split into chunks of 60 agents to not overload the salt master
agents = Agent.objects.all()
online = [i.salt_id for i in agents if i.status == "online"]
chunks = (online[i : i + 60] for i in range(0, len(online), 60))
self.stdout.write(self.style.SUCCESS("Syncing agent modules..."))
for chunk in chunks:
r = Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
sleep(5)
has_old_config = True
rmm_conf = "/etc/nginx/sites-available/rmm.conf"
if os.path.exists(rmm_conf):
with open(rmm_conf) as f:
for line in f:
if "location" and "builtin" in line:
has_old_config = False
break
if has_old_config:
new_conf = """
location /builtin/ {
internal;
add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
alias /srv/salt/scripts/;
}
"""
after_this = """
location /saltscripts/ {
internal;
add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
alias /srv/salt/scripts/userdefined/;
}
"""
self.stdout.write(self.style.ERROR("*" * 100))
self.stdout.write("\n")
self.stdout.write(
self.style.ERROR(
"WARNING: A recent update requires you to manually edit your nginx config"
)
)
self.stdout.write("\n")
self.stdout.write(
self.style.ERROR("Please add the following location block to ")
+ self.style.WARNING(rmm_conf)
)
self.stdout.write(self.style.SUCCESS(new_conf))
self.stdout.write("\n")
self.stdout.write(
self.style.ERROR(
"You can paste the above right after the following block that's already in your nginx config:"
)
)
self.stdout.write(after_this)
self.stdout.write("\n")
self.stdout.write(
self.style.ERROR(
"Make sure to replace rmm.yourwebsite.com with your domain"
)
)
self.stdout.write(
self.style.ERROR("After editing, restart nginx with the command ")
+ self.style.WARNING("sudo systemctl restart nginx")
)
self.stdout.write("\n")
self.stdout.write(self.style.ERROR("*" * 100))
input("Press Enter to continue...")
# install go
if not os.path.exists("/usr/local/rmmgo/"):
self.stdout.write(self.style.SUCCESS("Installing golang"))
subprocess.run("sudo mkdir -p /usr/local/rmmgo", shell=True)
tmpdir = tempfile.mkdtemp()
r = subprocess.run(
f"wget https://golang.org/dl/go1.15.linux-amd64.tar.gz -P {tmpdir}",
f"wget https://golang.org/dl/go1.15.5.linux-amd64.tar.gz -P {tmpdir}",
shell=True,
)
gotar = os.path.join(tmpdir, "go1.15.linux-amd64.tar.gz")
gotar = os.path.join(tmpdir, "go1.15.5.linux-amd64.tar.gz")
subprocess.run(f"tar -xzf {gotar} -C {tmpdir}", shell=True)

View File

@@ -6,4 +6,4 @@ class Command(BaseCommand):
help = "Reload Nats"
def handle(self, *args, **kwargs):
reload_nats()
reload_nats()

View File

@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0008_auto_20200910_1434'),
("core", "0008_auto_20200910_1434"),
]
operations = [
migrations.AddField(
model_name='coresettings',
name='agent_auto_update',
model_name="coresettings",
name="agent_auto_update",
field=models.BooleanField(default=True),
),
]

View File

@@ -6,28 +6,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0009_coresettings_agent_auto_update'),
("core", "0009_coresettings_agent_auto_update"),
]
operations = [
migrations.AddField(
model_name='coresettings',
name='created_by',
model_name="coresettings",
name="created_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='coresettings',
name='created_time',
model_name="coresettings",
name="created_time",
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='coresettings',
name='modified_by',
model_name="coresettings",
name="modified_by",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='coresettings',
name='modified_time',
model_name="coresettings",
name="modified_time",
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -7,28 +7,34 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0010_auto_20201002_1257'),
("core", "0010_auto_20201002_1257"),
]
operations = [
migrations.AddField(
model_name='coresettings',
name='sms_alert_recipients',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None),
model_name="coresettings",
name="sms_alert_recipients",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(blank=True, max_length=255, null=True),
blank=True,
default=list,
null=True,
size=None,
),
),
migrations.AddField(
model_name='coresettings',
name='twilio_account_sid',
model_name="coresettings",
name="twilio_account_sid",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='coresettings',
name='twilio_auth_token',
model_name="coresettings",
name="twilio_auth_token",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='coresettings',
name='twilio_number',
model_name="coresettings",
name="twilio_number",
field=models.CharField(blank=True, max_length=255, null=True),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-10 18:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("core", "0011_auto_20201026_0719"),
]
operations = [
migrations.AddField(
model_name="coresettings",
name="check_history_prune_days",
field=models.PositiveIntegerField(default=30),
),
]

View File

@@ -49,6 +49,8 @@ class CoreSettings(BaseAuditModel):
default_time_zone = models.CharField(
max_length=255, choices=TZ_CHOICES, default="America/Los_Angeles"
)
# removes check history older than days
check_history_prune_days = models.PositiveIntegerField(default=30)
mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
mesh_site = models.CharField(max_length=255, null=True, blank=True, default="")

View File

@@ -4,8 +4,10 @@ from loguru import logger
from django.conf import settings
from django.utils import timezone as djangotime
from tacticalrmm.celery import app
from core.models import CoreSettings
from autotasks.models import AutomatedTask
from autotasks.tasks import delete_win_task_schedule
from checks.tasks import prune_check_history
logger.configure(**settings.LOG_CONFIG)
@@ -25,3 +27,7 @@ def core_maintenance_tasks():
if now > task_time_utc:
delete_win_task_schedule.delay(task.pk)
# remove old CheckHistory data
older_than = CoreSettings.objects.first().check_history_prune_days
prune_check_history.delay(older_than)

View File

@@ -1,5 +1,8 @@
from tacticalrmm.test import TacticalTestCase
from core.tasks import core_maintenance_tasks
from unittest.mock import patch
from core.models import CoreSettings
from model_bakery import baker, seq
class TestCoreTasks(TacticalTestCase):
@@ -31,3 +34,95 @@ class TestCoreTasks(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("get", url)
@patch("automation.tasks.generate_all_agent_checks_task.delay")
def test_edit_coresettings(self, generate_all_agent_checks_task):
url = "/core/editsettings/"
# setup
policies = baker.make("Policy", _quantity=2)
# test normal request
data = {
"smtp_from_email": "newexample@example.com",
"mesh_token": "New_Mesh_Token",
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(
CoreSettings.objects.first().smtp_from_email, data["smtp_from_email"]
)
self.assertEqual(CoreSettings.objects.first().mesh_token, data["mesh_token"])
generate_all_agent_checks_task.assert_not_called()
# test adding policy
data = {
"workstation_policy": policies[0].id,
"server_policy": policies[1].id,
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id)
self.assertEqual(
CoreSettings.objects.first().workstation_policy.id, policies[0].id
)
self.assertEqual(generate_all_agent_checks_task.call_count, 2)
generate_all_agent_checks_task.reset_mock()
# test remove policy
data = {
"workstation_policy": "",
}
r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(CoreSettings.objects.first().workstation_policy, None)
self.assertEqual(generate_all_agent_checks_task.call_count, 1)
self.check_not_authenticated("patch", url)
@patch("tacticalrmm.utils.reload_nats")
@patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
def test_ui_maintenance_actions(self, remove_orphaned_win_tasks, reload_nats):
url = "/core/servermaintenance/"
agents = baker.make_recipe("agents.online_agent", _quantity=3)
# test with empty data
r = self.client.post(url, {})
self.assertEqual(r.status_code, 400)
# test with invalid action
data = {"action": "invalid_action"}
r = self.client.post(url, data)
self.assertEqual(r.status_code, 400)
# test reload nats action
data = {"action": "reload_nats"}
r = self.client.post(url, data)
self.assertEqual(r.status_code, 200)
reload_nats.assert_called_once()
# test prune db with no tables
data = {"action": "prune_db"}
r = self.client.post(url, data)
self.assertEqual(r.status_code, 400)
# test prune db with tables
data = {
"action": "prune_db",
"prune_tables": ["audit_logs", "agent_outages", "pending_actions"],
}
r = self.client.post(url, data)
self.assertEqual(r.status_code, 200)
# test remove orphaned tasks
data = {"action": "rm_orphaned_tasks"}
r = self.client.post(url, data)
self.assertEqual(r.status_code, 200)
remove_orphaned_win_tasks.assert_called()
self.check_not_authenticated("post", url)

View File

@@ -8,4 +8,5 @@ urlpatterns = [
path("version/", views.version),
path("emailtest/", views.email_test),
path("dashinfo/", views.dashboard_info),
path("servermaintenance/", views.server_maintenance),
]

View File

@@ -42,21 +42,19 @@ def get_core_settings(request):
@api_view(["PATCH"])
def edit_settings(request):
settings = CoreSettings.objects.first()
serializer = CoreSettingsSerializer(instance=settings, data=request.data)
coresettings = CoreSettings.objects.first()
old_server_policy = coresettings.server_policy
old_workstation_policy = coresettings.workstation_policy
serializer = CoreSettingsSerializer(instance=coresettings, data=request.data)
serializer.is_valid(raise_exception=True)
new_settings = serializer.save()
# check if default policies changed
if settings.server_policy != new_settings.server_policy:
generate_all_agent_checks_task.delay(
mon_type="server", clear=True, create_tasks=True
)
if old_server_policy != new_settings.server_policy:
generate_all_agent_checks_task.delay(mon_type="server", create_tasks=True)
if settings.workstation_policy != new_settings.workstation_policy:
generate_all_agent_checks_task.delay(
mon_type="workstation", clear=True, create_tasks=True
)
if old_workstation_policy != new_settings.workstation_policy:
generate_all_agent_checks_task.delay(mon_type="workstation", create_tasks=True)
return Response("ok")
@@ -69,7 +67,13 @@ def version(request):
@api_view()
def dashboard_info(request):
return Response(
{"trmm_version": settings.TRMM_VERSION, "dark_mode": request.user.dark_mode}
{
"trmm_version": settings.TRMM_VERSION,
"dark_mode": request.user.dark_mode,
"show_community_scripts": request.user.show_community_scripts,
"dbl_click_action": request.user.agent_dblclick_action,
"default_agent_tbl_tab": request.user.default_agent_tbl_tab,
}
)
@@ -84,3 +88,56 @@ def email_test(request):
         return notify_error(r)
 
     return Response("Email Test OK!")
+
+
+@api_view(["POST"])
+def server_maintenance(request):
+    from tacticalrmm.utils import reload_nats
+
+    if "action" not in request.data:
+        return notify_error("The data is incorrect")
+
+    if request.data["action"] == "reload_nats":
+        reload_nats()
+        return Response("Nats configuration was reloaded successfully.")
+
+    if request.data["action"] == "rm_orphaned_tasks":
+        from agents.models import Agent
+        from autotasks.tasks import remove_orphaned_win_tasks
+
+        agents = Agent.objects.only("pk", "last_seen", "overdue_time")
+        online = [i for i in agents if i.status == "online"]
+        for agent in online:
+            remove_orphaned_win_tasks.delay(agent.pk)
+
+        return Response(
+            "The task has been initiated. Check the Debug Log in the UI for progress."
+        )
+
+    if request.data["action"] == "prune_db":
+        from agents.models import AgentOutage
+        from logs.models import AuditLog, PendingAction
+
+        if "prune_tables" not in request.data:
+            return notify_error("The data is incorrect.")
+
+        tables = request.data["prune_tables"]
+        records_count = 0
+        if "agent_outages" in tables:
+            agentoutages = AgentOutage.objects.exclude(recovery_time=None)
+            records_count += agentoutages.count()
+            agentoutages.delete()
+
+        if "audit_logs" in tables:
+            auditlogs = AuditLog.objects.filter(action="check_run")
+            records_count += auditlogs.count()
+            auditlogs.delete()
+
+        if "pending_actions" in tables:
+            pendingactions = PendingAction.objects.filter(status="completed")
+            records_count += pendingactions.count()
+            pendingactions.delete()
+
+        return Response(f"{records_count} records were pruned from the database")
+
+    return notify_error("The data is incorrect")

View File

@@ -6,13 +6,28 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('logs', '0007_auditlog_debug_info'),
+        ("logs", "0007_auditlog_debug_info"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='auditlog',
-            name='action',
-            field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
+            model_name="auditlog",
+            name="action",
+            field=models.CharField(
+                choices=[
+                    ("login", "User Login"),
+                    ("failed_login", "Failed User Login"),
+                    ("delete", "Delete Object"),
+                    ("modify", "Modify Object"),
+                    ("add", "Add Object"),
+                    ("view", "View Object"),
+                    ("check_run", "Check Run"),
+                    ("task_run", "Task Run"),
+                    ("remote_session", "Remote Session"),
+                    ("execute_script", "Execute Script"),
+                    ("execute_command", "Execute Command"),
+                ],
+                max_length=100,
+            ),
         ),
     ]

View File

@@ -6,13 +6,29 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('logs', '0008_auto_20201110_1431'),
+        ("logs", "0008_auto_20201110_1431"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='auditlog',
-            name='action',
-            field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
+            model_name="auditlog",
+            name="action",
+            field=models.CharField(
+                choices=[
+                    ("login", "User Login"),
+                    ("failed_login", "Failed User Login"),
+                    ("delete", "Delete Object"),
+                    ("modify", "Modify Object"),
+                    ("add", "Add Object"),
+                    ("view", "View Object"),
+                    ("check_run", "Check Run"),
+                    ("task_run", "Task Run"),
+                    ("agent_install", "Agent Install"),
+                    ("remote_session", "Remote Session"),
+                    ("execute_script", "Execute Script"),
+                    ("execute_command", "Execute Command"),
+                ],
+                max_length=100,
+            ),
         ),
     ]

Some files were not shown because too many files have changed in this diff.