Compare commits

...

217 Commits

Author SHA1 Message Date
wh1te909
9011148adf Release 0.8.4 2021-09-09 19:14:11 +00:00
wh1te909
897d0590d2 bump version 2021-09-09 19:10:28 +00:00
wh1te909
33b33e8458 retry websocket on 1006 error 2021-09-09 19:07:00 +00:00
wh1te909
7758f5c187 add a file to ignore 2021-09-09 18:47:28 +00:00
wh1te909
a9a0df9699 fix tests 2021-09-09 16:26:06 +00:00
wh1te909
216a9ed035 speed up some views 2021-09-09 06:50:30 +00:00
wh1te909
35d61b6a6c add missing trailing slashes fixes #43 2021-09-09 05:55:27 +00:00
wh1te909
5fb72cea53 add types to url 2021-09-09 05:54:34 +00:00
Dan
d54d021e9f Merge pull request #697 from silversword411/develop
Tweaks
2021-09-08 18:17:42 -07:00
silversword411
06e78311df Tweaks 2021-09-08 21:04:35 -04:00
Dan
df720f95ca Merge pull request #696 from silversword411/develop
Unsupported Officially...no we really mean it
2021-09-08 17:06:48 -07:00
Dan
00faff34d3 Merge pull request #695 from aaronstuder/patch-1
Update install_server.md
2021-09-08 17:06:35 -07:00
silversword411
2b5b3ea4f3 Unsupported Officially...no we really mean it 2021-09-08 18:38:40 -04:00
sadnub
95e608d0b4 fix agent saying that it was approving updates when it actually didn't 2021-09-08 17:37:02 -04:00
sadnub
1d55bf87dd fix audit and debug log not refreshing on agent change 2021-09-08 17:36:30 -04:00
aaronstuder
1220ce53eb Update install_server.md 2021-09-08 12:55:53 -04:00
sadnub
2006218f87 honor block_dashboard_login from the login 2fa verification view 2021-09-08 10:29:58 -04:00
sadnub
40f427a387 add trailing slash to missing urls. Potentially fixes #43 2021-09-08 10:28:54 -04:00
sadnub
445e95baed formatting 2021-09-08 10:27:42 -04:00
sadnub
67fbc9ad33 make installer user use the new block_dashboard_login property 2021-09-06 22:42:32 -04:00
sadnub
1253e9e465 formatting 2021-09-06 20:10:14 -04:00
sadnub
21069432e8 fix tests 2021-09-06 20:06:23 -04:00
sadnub
6facf6a324 fix nginx on docker dev 2021-09-06 12:54:37 -04:00
sadnub
7556197485 move policy processing on any agent changes to celery task 2021-09-06 11:47:07 -04:00
wh1te909
8dddd2d896 Release 0.8.3 2021-09-06 09:30:51 +00:00
wh1te909
f319c95c2b bump version 2021-09-06 09:10:00 +00:00
wh1te909
8e972b0907 add docs for api keys 2021-09-06 08:50:18 +00:00
sadnub
395e400215 fix docker build script 2021-09-05 23:52:33 -04:00
sadnub
3685e3111f fix docker prod spinup. Move api container to uwsgi 2021-09-05 23:49:10 -04:00
sadnub
7bb1c75dc6 add auditing to objects URLAction, KeyStore, CustomFields and also audit when url actions are run 2021-09-05 12:32:37 -04:00
sadnub
b20834929c formatting 2021-09-05 11:35:15 -04:00
sadnub
181891757e fix tasks with assigned checks being added to automation policy 2021-09-05 11:22:21 -04:00
wh1te909
b16feeae44 fix debug log 2021-09-05 08:45:41 +00:00
wh1te909
684e049f27 typo 2021-09-05 06:07:46 +00:00
wh1te909
8cebd901b2 update reqs 2021-09-05 01:40:25 +00:00
wh1te909
3c96beb8fb fix celery memory leak 2021-09-04 23:40:57 +00:00
Dan
8a46459cf9 Merge pull request #683 from silversword411/develop
wip script additions and docs updates
2021-09-04 15:46:31 -07:00
Dan
be5c3e9daa Merge pull request #673 from juaromu/docs-securing-nginx
Securing NGINX added to docs
2021-09-04 15:45:37 -07:00
wh1te909
e44453877c skip sw errors fixes #682 2021-09-04 22:23:35 +00:00
wh1te909
f772a4ec56 allow users to reset their own password/2fa fixes #686 2021-09-04 22:15:51 +00:00
wh1te909
44182ec683 fix render error if results are null 2021-09-03 06:29:27 +00:00
wh1te909
b9ab13fa53 hide status field under properly implemented 2021-09-03 06:28:27 +00:00
wh1te909
2ad6721c95 fix pipeline 2021-09-03 05:45:31 +00:00
wh1te909
b7d0604e62 first/last name optional 2021-09-03 05:35:54 +00:00
wh1te909
a7518b4b26 black 2021-09-03 05:34:44 +00:00
wh1te909
50613f5d3e add api auth in settings, removed from local_settings 2021-09-03 05:31:44 +00:00
sadnub
f814767703 add tests and some ui fixes 2021-09-02 23:52:26 -04:00
sadnub
4af86d6456 set alert template on new agents 2021-09-02 21:36:35 -04:00
sadnub
f0a4f00c2d fix properties and block user dashboard access if denied 2021-09-02 21:32:18 -04:00
sadnub
4321affddb allow for creating special tokens for api access and bypassing two factor auth 2021-09-02 21:10:23 -04:00
silversword411
926ed55b9b docs update - Authorized users 2021-09-02 11:28:05 -04:00
silversword411
2ebf308565 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-09-02 10:33:36 -04:00
silversword411
1c5e736dce wip script network scanner 2021-09-02 10:33:25 -04:00
silversword411
b591f9f5b7 MOAR wips 2021-09-02 08:39:03 -04:00
silversword411
9724882578 wip script for print check 2021-09-02 08:23:05 -04:00
silversword411
ddef2df101 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-09-02 08:11:21 -04:00
silversword411
8af69c4284 adding alternate ssl to unsupported docs 2021-09-02 07:55:33 -04:00
silversword411
6ebe1ab467 adding alternate ssl to unsupported docs 2021-09-02 07:39:44 -04:00
silversword411
24e4d9cf6d docs Making docker howto visible 2021-09-02 05:21:51 -04:00
silversword411
f35fa0aa58 Troubleshooting docs update 2021-09-01 18:50:18 -04:00
wh1te909
4942f262f1 Release 0.8.2 2021-09-01 07:18:21 +00:00
wh1te909
a20b1a973e bump version 2021-09-01 07:18:09 +00:00
wh1te909
eae5e00706 allow filtering by overdue #674 2021-09-01 06:26:55 +00:00
silversword411
403762d862 wip script additions 2021-08-31 22:45:53 -04:00
sadnub
5c92d4b454 fix bug where script args weren't being substituted when testing scripts 2021-08-31 20:33:36 -04:00
wh1te909
38179b9d38 Release 0.8.1 2021-08-31 06:51:20 +00:00
wh1te909
8f510dde5a bump versions 2021-08-31 06:35:29 +00:00
wh1te909
be42d56e37 fix 500 error when trying to test newly added script 2021-08-31 06:16:40 +00:00
Juan J. Romero
6294530fa3 Securing NGINX added to docs 2021-08-31 15:45:47 +10:00
sadnub
c5c8f5fab1 formatting 2021-08-30 22:32:16 -04:00
sadnub
3d41d79078 change directory for nats configuration file for DOCKER. Fix nats-api commands in dev containers 2021-08-30 22:17:21 -04:00
sadnub
3005061a11 formatting 2021-08-30 08:06:15 -04:00
sadnub
65ea46f457 strip whitespace before processing collector output 2021-08-30 07:42:54 -04:00
wh1te909
eca8f32570 Release 0.8.0 2021-08-30 06:32:39 +00:00
wh1te909
8d1ef19c61 bump version 2021-08-30 06:28:40 +00:00
wh1te909
71d87d866b change schedule 2021-08-30 05:49:09 +00:00
wh1te909
c4f88bdce7 update for new debug log 2021-08-30 03:45:35 +00:00
sadnub
f722a115b1 update alerting docs and add database maintenance page 2021-08-29 16:54:05 -04:00
sadnub
1583beea7b update script docs 2021-08-29 16:25:33 -04:00
wh1te909
5b388c587b update python 2021-08-29 08:19:35 +00:00
wh1te909
e254923167 update mesh/nats 2021-08-29 08:13:04 +00:00
wh1te909
b0dbdd7803 fix field 2021-08-29 07:16:09 +00:00
wh1te909
aa6ebe0122 fix pagination 2021-08-29 03:40:14 +00:00
wh1te909
c5f179bab8 update nats-api 2021-08-29 03:39:58 +00:00
sadnub
e65cb86638 rework script testing a bit. Fix mismatch object properties and props 2021-08-28 10:33:18 -04:00
wh1te909
a349998640 add watcher 2021-08-28 06:48:00 +00:00
wh1te909
43f60610b8 fix props 2021-08-28 06:36:03 +00:00
wh1te909
46d042087a fix row name 2021-08-28 06:32:50 +00:00
sadnub
ee214727f6 format agent history table 2021-08-28 01:00:17 -04:00
sadnub
b4c1ec55ec fix env.example 2021-08-27 22:12:25 -04:00
Dan
0fdd54f710 Merge pull request #664 from bc24fl/change-community-script-add-domain-rename-capability-and-refactor
Change Win_Rename_Computer.ps1 community script to add domain joined …
2021-08-25 15:21:55 -07:00
wh1te909
4f0cdeaec0 reduce nats max payload as it will be enforced in future nats update 2021-08-25 21:14:35 +00:00
wh1te909
e5cc38857c update quasar conf to support quasar app 3.1.0 (webpack-dev-server 4.0.0) 2021-08-25 21:13:05 +00:00
wh1te909
fe4b9d71c0 update reqs 2021-08-25 21:11:39 +00:00
wh1te909
5c1181e40e Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2021-08-23 04:25:14 +00:00
wh1te909
8b71832bc2 update reqs 2021-08-23 04:19:21 +00:00
Irving
8412ed6065 Change Win_Rename_Computer.ps1 community script to add domain joined computer rename functionality and refactor per standards. 2021-08-22 16:36:19 -04:00
Dan
207f6cdc7c Merge pull request #661 from bc24fl/fix-doc-typo-in-alert-page
Fixed typo in documentation alert page
2021-08-21 23:44:09 -07:00
Dan
b0b51f5730 Merge pull request #660 from silversword411/develop
Script library - uninstall software
2021-08-21 23:38:46 -07:00
wh1te909
def6833ef0 new pipeline agent 2021-08-21 15:26:25 +00:00
wh1te909
c528dd3de1 attempt to fix pipelines 2021-08-20 08:23:16 +00:00
wh1te909
544270e35d new pipeline agent 2021-08-20 07:35:02 +00:00
bc24fl
657e029fee Fixed typo in documentation alert page 2021-08-19 15:48:46 -04:00
silversword411
49469d7689 docs update - adding to docker instructions 2021-08-18 22:59:28 -04:00
silversword411
4f0dd452c8 docs troubleshooting tweaks 2021-08-18 22:39:19 -04:00
silversword411
3f741eab11 Script library - uninstall software 2021-08-18 12:01:41 -04:00
Dan
190368788f Merge pull request #654 from NiceGuyIT/develop
Bitdefender install script:  Improve error detection and logging
2021-08-11 23:54:14 -07:00
Dan
8306a3f566 Merge pull request #649 from silversword411/develop
Docs and scripts updates
2021-08-11 23:53:24 -07:00
silversword411
988c134c09 choco typo fixes 2021-08-03 00:24:14 -04:00
silversword411
af0a4d578b Community Script Replacing Choco upgrade script 2021-08-03 00:06:38 -04:00
sadnub
9bc0abc831 fix favorited community scripts showing up if community scripts are hidden. Fix delete script in Script Manager 2021-08-02 17:48:13 -04:00
David Randall
41410e99e7 Improve error detection and logging 2021-08-02 12:43:39 -04:00
David Randall
deae04d5ff Merge branch 'wh1te909:develop' into develop 2021-08-02 12:37:49 -04:00
David Randall
7d6eeffd66 Improve error detection and logging 2021-08-02 12:33:32 -04:00
sadnub
629858e095 log django 500 errors (for easier debugging) to new log file 2021-08-02 09:35:41 -04:00
sadnub
dfdb628347 change favorited script run on agent to open the Run Script modal with the script and defaults populated 2021-08-02 09:34:17 -04:00
sadnub
6e48b28fc9 fix filterable dropdown and prepopulating select value 2021-08-02 09:33:24 -04:00
sadnub
3ba450e837 fix replace values function 2021-08-02 09:21:07 -04:00
sadnub
688ed93500 allow url actions to be run against clients and sites 2021-08-01 00:17:48 -04:00
sadnub
7268ba20a2 Finished script snippet feature 2021-07-31 15:22:31 -04:00
sadnub
63d9e73098 fix tests 2021-07-31 13:29:51 -04:00
sadnub
564c048f90 add missing migration 2021-07-31 13:07:48 -04:00
sadnub
5f801c74d5 allowed dismissing persistent modals on Esc press. allow filtering on certain scripts and agent dropdowns. moved other dropdowns to tactical dropdown. Fixes with bulk actions 2021-07-31 11:56:47 -04:00
sadnub
b405fbc09a handle a few more errors when auth token is expired 2021-07-31 11:54:28 -04:00
sadnub
7a64c2eb49 update quasar 2021-07-31 11:54:00 -04:00
sadnub
c93cbac3b1 rework bulk action modal. start running bulk actions on next agent checkin 2021-07-30 12:48:47 -04:00
sadnub
8b0f67b8a6 actually stop the unauthorized console errors with websocket connection 2021-07-30 12:46:15 -04:00
sadnub
0d96129f2d get dropdown filtering working on custom tactical dropdown component 2021-07-30 12:45:26 -04:00
sadnub
54ee12d2b3 rework script manager and modals to composition api. Start on script snippets 2021-07-29 19:41:32 -04:00
silversword411
92fc042103 Win 10 upgrade script removing license check 2021-07-29 00:50:17 -04:00
silversword411
9bb7016fa7 Win 10 upgrade script commenting 2021-07-28 16:45:12 -04:00
silversword411
3ad56feafb Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-07-27 19:04:58 -04:00
silversword411
14d59c3dec Sorting alphabetical and fixing pic 2021-07-27 19:04:40 -04:00
silversword411
443f419770 wip script add 2021-07-27 19:04:40 -04:00
silversword411
ddbb58755e Docs updates 2021-07-27 19:04:39 -04:00
silversword411
524283b9ff adding db maintenance to docs 2021-07-27 19:04:39 -04:00
silversword411
fb178d2944 add wip script 2021-07-27 19:04:39 -04:00
silversword411
52f4ad9403 add library wifi password retrieval script 2021-07-27 19:04:38 -04:00
silversword411
ba0c08ef1f Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-07-27 19:03:36 -04:00
silversword411
9e19b1e04c wip script add 2021-07-27 19:02:22 -04:00
silversword411
b2118201b1 Sorting alphabetical and fixing pic 2021-07-25 15:27:05 -04:00
sadnub
b4346aa056 formatting 2021-07-21 20:41:11 -04:00
sadnub
b599f05aab fix version 2021-07-21 20:35:57 -04:00
sadnub
93d78a0200 add ipware req 2021-07-21 20:33:42 -04:00
silversword411
449957b2eb Docs updates 2021-07-21 15:02:56 -04:00
sadnub
0a6d44bad3 Fixes #561 2021-07-21 14:48:59 -04:00
sadnub
17ceaaa503 allow skipping alert resolved/failure actions on types of alerts 2021-07-21 14:30:25 -04:00
sadnub
d70803b416 add audit log retention 2021-07-21 13:49:34 -04:00
sadnub
aa414d4702 fix auditing on models that override the save method. Added Alert Template and Role to auditable models 2021-07-21 13:33:15 -04:00
sadnub
f24e1b91ea stop ws from reconnecting on unauthorized error 2021-07-21 10:53:55 -04:00
sadnub
1df8163090 add role and alert template to audit logging 2021-07-21 00:28:51 -04:00
sadnub
659ddf6a45 fix docker build script 2021-07-20 23:11:15 -04:00
sadnub
e110068da4 add public IP logging to audit log and agent login tables 2021-07-20 23:10:51 -04:00
sadnub
c943f6f936 stop the ws connection from retrying when logging out or session is expired 2021-07-20 16:46:16 -04:00
silversword411
cb1fe7fe54 adding db maintenance to docs 2021-07-19 10:44:38 -04:00
silversword411
593f1f63cc add wip script 2021-07-19 10:35:54 -04:00
silversword411
66aa70cf75 Merge branch 'develop' of https://github.com/silversword411/tacticalrmm into develop 2021-07-18 21:08:18 -04:00
silversword411
304be99067 add library wifi password retrieval script 2021-07-18 21:08:04 -04:00
silversword411
9a01ec35f4 add library wifi password retrieval script 2021-07-18 21:04:16 -04:00
sadnub
bfa5b4fba5 allow persistent mesh config and fix mongodb password uri issue 2021-07-17 15:57:35 -04:00
Dan
d2f63ef353 Merge pull request #641 from silversword411/develop
Docs and scripts additions
2021-07-17 10:57:07 -07:00
Dan
50f334425e Merge pull request #640 from bern-spl/patch-1
Update README.md
2021-07-17 10:56:33 -07:00
silversword411
f78212073c fix json 2021-07-17 11:21:39 -04:00
silversword411
5c655f5a82 Adding grafana to docs 2021-07-17 11:06:02 -04:00
silversword411
6a6446bfcb Adding configuring email to docs 2021-07-17 10:48:22 -04:00
silversword411
b60a3a5e50 Adding scripts 2021-07-17 10:33:31 -04:00
Bernard Blundell
02ccbab8e5 Update README.md 2021-07-17 14:51:09 +01:00
wh1te909
023ff3f964 update bin [skip ci] 2021-07-17 07:16:38 +00:00
wh1te909
7c5e8df3b8 fix tests 2021-07-17 07:11:29 +00:00
wh1te909
56fdab260b add/refactor task 2021-07-17 06:59:21 +00:00
wh1te909
7cce49dc1a deprecate an endpoint 2021-07-17 06:40:45 +00:00
wh1te909
2dfaafb20b fix bug where sms attempting to be sent when not configured 2021-07-17 06:35:31 +00:00
wh1te909
6138a5bf54 move some funcs to go 2021-07-17 05:13:40 +00:00
wh1te909
828c67cc00 fix tests 2021-07-17 00:33:21 +00:00
wh1te909
e70cd44e18 add history to send command 2021-07-16 21:45:16 +00:00
wh1te909
efa5ac5edd more run script rework 2021-07-16 06:11:40 +00:00
wh1te909
788b11e759 add fields to agent history 2021-07-14 07:38:31 +00:00
wh1te909
d049d7a61f update reqs 2021-07-14 07:36:55 +00:00
Dan
075c833b58 Merge pull request #626 from sadnub/runscript-rework
Agent Tabs/Run Script WIP
2021-07-13 11:43:38 -07:00
Dan
e9309c2a96 Merge pull request #638 from silversword411/develop
Docs and scripts update
2021-07-12 22:24:12 -07:00
silversword411
a592d2b397 Adding scripts to library and WIP 2021-07-13 00:21:43 -04:00
silversword411
3ad1805ac0 tweak faq 2021-07-12 23:51:16 -04:00
Dan
dbc2bab698 Merge pull request #632 from silversword411/develop
script library and docs updates
2021-07-12 08:51:13 -07:00
silversword411
79eec5c299 Bitdefender GravityZone Docs 2021-07-11 14:10:10 -04:00
silversword411
7754b0c575 howitallworks tweaks 2021-07-11 13:55:37 -04:00
silversword411
be4289ce76 Docs update 2021-07-11 13:26:15 -04:00
silversword411
67f5226270 add BitDefender Gravity Zone Install script 2021-07-10 12:42:27 -04:00
sadnub
b6d77c581b fix styling 2021-07-09 21:13:35 -04:00
sadnub
d84bf47d04 added script cloning functionality 2021-07-09 18:47:28 -04:00
sadnub
aba3a7bb9e fix and add tests 2021-07-09 18:00:28 -04:00
sadnub
6281736d89 implement test script in script edit 2021-07-09 08:03:53 -04:00
sadnub
94d96f89d3 implement run script save to custom field and agent notes 2021-07-09 00:16:15 -04:00
sadnub
4b55f9dead add tests and minor fixes 2021-07-08 22:02:02 -04:00
sadnub
5c6dce94df fix broken tests 2021-07-08 13:02:50 -04:00
wh1te909
f7d8f9c7f5 fix mkdocs warning 2021-07-08 06:39:32 +00:00
Dan
053df24f9c Merge pull request #627 from silversword411/develop
docs update and script tweak
2021-07-07 23:33:00 -07:00
silversword411
1dc470e434 powershell upgrade 2021-07-07 22:17:11 -04:00
silversword411
cfd8773267 wip script add 2021-07-07 22:13:17 -04:00
silversword411
67045cf6c1 docs tweaks 2021-07-07 22:00:52 -04:00
sadnub
ddfb9e7239 run script rework start 2021-07-07 19:28:52 -04:00
sadnub
9f6eed5472 setup pruning tasks 2021-07-07 19:28:52 -04:00
sadnub
15a1e2ebcb add agent history 2021-07-07 19:28:52 -04:00
sadnub
fcfe450b07 finish debug and audit rework 2021-07-07 19:28:52 -04:00
sadnub
a69bbb3bc9 audit manager rework wip 2021-07-07 19:28:52 -04:00
sadnub
6d2559cfc1 debug log rework 2021-07-07 19:28:52 -04:00
sadnub
b3a62615f3 moved debug log to database. modified frontend to composition api. moved a few mixins. 2021-07-07 19:28:52 -04:00
sadnub
57f5cca1cb debug modal rework into comp api 2021-07-07 19:28:52 -04:00
sadnub
6b9851f540 new agent tabs wip 2021-07-07 19:28:52 -04:00
silversword411
36fd203a88 Updating which registry tree to query 2021-07-07 16:00:48 -04:00
Dan
3f5cb5d61c Merge pull request #623 from meuchels/develop
Fix SC collector script to work with windows 7
2021-07-07 00:43:56 -07:00
Samuel Meuchel
862fc6a946 add newline to end 2021-07-06 19:45:03 -05:00
Samuel Meuchel
92c386ac0e Fixed ScreenConnect Collector script for ps 2.0 2021-07-06 19:41:16 -05:00
Samuel Meuchel
98a11a3645 add this exclusion for your ScreenConnect Deployment script to work. 2021-07-06 11:25:17 -05:00
Dan
62be0ed936 Merge pull request #610 from meuchels/develop
Add TeamViewer Script and Integration Docs
2021-07-01 14:45:45 -07:00
Samuel Meuchel
b7de73fd8a removed args from script json. 2021-07-01 08:19:12 -05:00
Samuel Meuchel
e2413f1af2 Add AnyDesk script collector and Integration Docs. 2021-06-30 17:33:48 -05:00
Samuel Meuchel
0e77d575c4 Add TeamViewer Script and Integration Docs 2021-06-30 15:09:04 -05:00
251 changed files with 11007 additions and 3920 deletions

View File

@@ -25,7 +25,8 @@ POSTGRES_PASS=postgrespass
# DEV SETTINGS
APP_PORT=80
API_PORT=80
+API_PROTOCOL=https://
HTTP_PROTOCOL=https
-DOCKER_NETWORK="172.21.0.0/24"
-DOCKER_NGINX_IP="172.21.0.20"
-NATS_PORTS="4222:4222"
+DOCKER_NETWORK=172.21.0.0/24
+DOCKER_NGINX_IP=172.21.0.20
+NATS_PORTS=4222:4222

View File

@@ -1,4 +1,4 @@
-FROM python:3.9.2-slim
+FROM python:3.9.6-slim
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
@@ -13,12 +13,17 @@ EXPOSE 8000 8383 8005
RUN groupadd -g 1000 tactical && \
useradd -u 1000 -g 1000 tactical
-# Copy Dev python reqs
-COPY ./requirements.txt /
-# Copy Docker Entrypoint
-COPY ./entrypoint.sh /
+# Copy nats-api file
+COPY natsapi/bin/nats-api /usr/local/bin/
+RUN chmod +x /usr/local/bin/nats-api
+# Copy dev python reqs
+COPY .devcontainer/requirements.txt /
+# Copy docker entrypoint.sh
+COPY .devcontainer/entrypoint.sh /
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm

View File

@@ -6,8 +6,8 @@ services:
image: api-dev
restart: always
build:
-context: .
-dockerfile: ./api.dockerfile
+context: ..
+dockerfile: .devcontainer/api.dockerfile
command: ["tactical-api"]
environment:
API_PORT: ${API_PORT}
@@ -127,9 +127,6 @@ services:
init-dev:
container_name: trmm-init-dev
image: api-dev
-build:
-context: .
-dockerfile: ./api.dockerfile
restart: on-failure
command: ["tactical-init-dev"]
environment:
@@ -156,9 +153,6 @@ services:
celery-dev:
container_name: trmm-celery-dev
image: api-dev
-build:
-context: .
-dockerfile: ./api.dockerfile
command: ["tactical-celery-dev"]
restart: always
networks:
@@ -174,9 +168,6 @@ services:
celerybeat-dev:
container_name: trmm-celerybeat-dev
image: api-dev
-build:
-context: .
-dockerfile: ./api.dockerfile
command: ["tactical-celerybeat-dev"]
restart: always
networks:
@@ -192,9 +183,6 @@ services:
websockets-dev:
container_name: trmm-websockets-dev
image: api-dev
-build:
-context: .
-dockerfile: ./api.dockerfile
command: ["tactical-websockets-dev"]
restart: always
networks:
@@ -221,6 +209,8 @@ services:
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
APP_PORT: ${APP_PORT}
API_PORT: ${API_PORT}
+API_PROTOCOL: ${API_PROTOCOL}
+DEV: 1
networks:
dev:
ipv4_address: ${DOCKER_NGINX_IP}
@@ -234,9 +224,6 @@ services:
container_name: trmm-mkdocs-dev
image: api-dev
restart: always
-build:
-context: .
-dockerfile: ./api.dockerfile
command: ["tactical-mkdocs-dev"]
ports:
- "8005:8005"

View File

@@ -78,24 +78,6 @@ DATABASES = {
}
}
-REST_FRAMEWORK = {
-'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
-'DEFAULT_PERMISSION_CLASSES': (
-'rest_framework.permissions.IsAuthenticated',
-),
-'DEFAULT_AUTHENTICATION_CLASSES': (
-'knox.auth.TokenAuthentication',
-),
-}
-if not DEBUG:
-REST_FRAMEWORK.update({
-'DEFAULT_RENDERER_CLASSES': (
-'rest_framework.renderers.JSONRenderer',
-)
-})
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
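Note: the hunk above only removes the REST_FRAMEWORK block from the generated local settings; per commit 50613f5d3e ("add api auth in settings, removed from local_settings") that configuration now lives in the main Django settings so the new X-API-KEY authentication can be registered there. A minimal sketch of what that block could look like — the APIKeyAuthentication class name and module path are assumptions, not shown anywhere in this diff:

# sketch of api/tacticalrmm/tacticalrmm/settings.py, not the verbatim file
REST_FRAMEWORK = {
    "DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
    "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
    "DEFAULT_AUTHENTICATION_CLASSES": (
        "knox.auth.TokenAuthentication",
        # hypothetical path; the real module/class handling X-API-KEY is not in this diff
        "accounts.authentication.APIKeyAuthentication",
    ),
}

if not DEBUG:
    REST_FRAMEWORK.update(
        {"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",)}
    )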

View File

@@ -3,6 +3,7 @@ asyncio-nats-client
celery
channels
channels_redis
+django-ipware
Django
django-cors-headers
django-rest-knox

.gitignore
View File

@@ -48,3 +48,4 @@ nats-rmm.conf
.mypy_cache
docs/site/
reset_db.sh
+run_go_cmd.py

View File

@@ -9,7 +9,7 @@ Tactical RMM is a remote monitoring & management tool for Windows computers, bui
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
-Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.
+Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
### [Discord Chat](https://discord.gg/upGTkWp)
@@ -35,4 +35,4 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
## Installation / Backup / Restore / Usage
### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/)

View File

@@ -15,4 +15,5 @@ class Command(BaseCommand):
username=uuid.uuid4().hex,
is_installer_user=True,
password=User.objects.make_random_password(60), # type: ignore
+block_dashboard_login=True,
)

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-07-20 20:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0023_user_is_installer_user'),
]
operations = [
migrations.AddField(
model_name='user',
name='last_login_ip',
field=models.GenericIPAddressField(blank=True, default=None, null=True),
),
]

View File

@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-21 04:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0024_user_last_login_ip'),
]
operations = [
migrations.AddField(
model_name='role',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='role',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='role',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='role',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -0,0 +1,34 @@
# Generated by Django 3.2.6 on 2021-09-01 12:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0025_auto_20210721_0424'),
]
operations = [
migrations.CreateModel(
name='APIKey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_by', models.CharField(blank=True, max_length=100, null=True)),
('created_time', models.DateTimeField(auto_now_add=True, null=True)),
('modified_by', models.CharField(blank=True, max_length=100, null=True)),
('modified_time', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=25, unique=True)),
('key', models.CharField(blank=True, max_length=48, unique=True)),
('expiration', models.DateTimeField(blank=True, default=None, null=True)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='role',
name='can_manage_api_keys',
field=models.BooleanField(default=False),
),
]

View File

@@ -0,0 +1,25 @@
# Generated by Django 3.2.6 on 2021-09-03 00:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('accounts', '0026_auto_20210901_1247'),
]
operations = [
migrations.AddField(
model_name='apikey',
name='user',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='api_key', to='accounts.user'),
preserve_default=False,
),
migrations.AddField(
model_name='user',
name='block_dashboard_login',
field=models.BooleanField(default=False),
),
]

View File

@@ -1,5 +1,6 @@
from django.contrib.auth.models import AbstractUser
from django.db import models
+from django.db.models.fields import CharField, DateTimeField
from logs.models import BaseAuditModel
@@ -24,6 +25,7 @@ CLIENT_TREE_SORT_CHOICES = [
class User(AbstractUser, BaseAuditModel):
is_active = models.BooleanField(default=True)
+block_dashboard_login = models.BooleanField(default=False)
totp_key = models.CharField(max_length=50, null=True, blank=True)
dark_mode = models.BooleanField(default=True)
show_community_scripts = models.BooleanField(default=True)
@@ -48,6 +50,7 @@ class User(AbstractUser, BaseAuditModel):
loading_bar_color = models.CharField(max_length=255, default="red")
clear_search_when_switching = models.BooleanField(default=True)
is_installer_user = models.BooleanField(default=False)
+last_login_ip = models.GenericIPAddressField(default=None, blank=True, null=True)
agent = models.OneToOneField(
"agents.Agent",
@@ -73,7 +76,7 @@ class User(AbstractUser, BaseAuditModel):
return UserSerializer(user).data
-class Role(models.Model):
+class Role(BaseAuditModel):
name = models.CharField(max_length=255, unique=True)
is_superuser = models.BooleanField(default=False)
@@ -137,9 +140,19 @@ class Role(models.Model):
can_manage_accounts = models.BooleanField(default=False)
can_manage_roles = models.BooleanField(default=False)
+# authentication
+can_manage_api_keys = models.BooleanField(default=False)
def __str__(self):
return self.name
+@staticmethod
+def serialize(role):
+# serializes the agent and returns json
+from .serializers import RoleAuditSerializer
+return RoleAuditSerializer(role).data
@staticmethod
def perms():
return [
@@ -178,4 +191,22 @@ class Role(models.Model):
"can_manage_winupdates",
"can_manage_accounts",
"can_manage_roles",
+"can_manage_api_keys",
]
+class APIKey(BaseAuditModel):
+name = CharField(unique=True, max_length=25)
+key = CharField(unique=True, blank=True, max_length=48)
+expiration = DateTimeField(blank=True, null=True, default=None)
+user = models.ForeignKey(
+"accounts.User",
+related_name="api_key",
+on_delete=models.CASCADE,
+)
+@staticmethod
+def serialize(apikey):
+from .serializers import APIKeyAuditSerializer
+return APIKeyAuditSerializer(apikey).data
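APIKey.key is declared blank=True, so the model itself does not generate a key; the GetAddAPIKeys view further down fills it with 32 random uppercase letters and digits before saving. A hedged sketch of creating a key programmatically with the same recipe (the username and key name are placeholders):

import random
import string

from accounts.models import APIKey, User

user = User.objects.get(username="api_user")  # placeholder user
APIKey.objects.create(
    name="reporting",
    user=user,
    # same recipe the GetAddAPIKeys view uses: 32 random uppercase letters/digits
    key="".join(
        random.SystemRandom().choice(string.ascii_uppercase + string.digits)
        for _ in range(32)
    ),
)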

View File

@@ -8,6 +8,21 @@ class AccountsPerms(permissions.BasePermission):
if r.method == "GET":
return True
+# allow users to reset their own password/2fa see issue #686
+base_path = "/accounts/users/"
+paths = ["reset/", "reset_totp/"]
+if r.path in [base_path + i for i in paths]:
+from accounts.models import User
+try:
+user = User.objects.get(pk=r.data["id"])
+except User.DoesNotExist:
+pass
+else:
+if user == r.user:
+return True
return _has_perm(r, "can_manage_accounts")
@@ -17,3 +32,9 @@ class RolesPerms(permissions.BasePermission):
return True
return _has_perm(r, "can_manage_roles")
+class APIKeyPerms(permissions.BasePermission):
+def has_permission(self, r, view):
+return _has_perm(r, "can_manage_api_keys")
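The AccountsPerms change above lets an authenticated user call the reset endpoints for their own account even without can_manage_accounts (issue #686). A rough client-side sketch, assuming knox token auth and a PUT whose body carries the caller's own user id — the HTTP method and payload field names are assumptions, not confirmed by this diff:

import requests

API = "https://api.example.com"  # placeholder host
HEADERS = {"Authorization": "Token <knox-token>"}  # placeholder token

# reset own password; "id" must match the authenticated user per the permission check above
requests.put(
    f"{API}/accounts/users/reset/",
    headers=HEADERS,
    json={"id": 42, "password": "new-password"},  # field names are assumptions
)

# reset own 2FA
requests.put(f"{API}/accounts/users/reset_totp/", headers=HEADERS, json={"id": 42})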

View File

@@ -1,7 +1,11 @@
import pyotp
-from rest_framework.serializers import ModelSerializer, SerializerMethodField
+from rest_framework.serializers import (
+ModelSerializer,
+SerializerMethodField,
+ReadOnlyField,
+)
-from .models import User, Role
+from .models import APIKey, User, Role
class UserUISerializer(ModelSerializer):
@@ -17,6 +21,7 @@ class UserUISerializer(ModelSerializer):
"client_tree_splitter",
"loading_bar_color",
"clear_search_when_switching",
+"block_dashboard_login",
]
@@ -31,7 +36,9 @@ class UserSerializer(ModelSerializer):
"email",
"is_active",
"last_login",
+"last_login_ip",
"role",
+"block_dashboard_login",
]
@@ -57,3 +64,30 @@ class RoleSerializer(ModelSerializer):
class Meta:
model = Role
fields = "__all__"
+class RoleAuditSerializer(ModelSerializer):
+class Meta:
+model = Role
+fields = "__all__"
+class APIKeySerializer(ModelSerializer):
+username = ReadOnlyField(source="user.username")
+class Meta:
+model = APIKey
+fields = "__all__"
+class APIKeyAuditSerializer(ModelSerializer):
+username = ReadOnlyField(source="user.username")
+class Meta:
+model = APIKey
+fields = [
+"name",
+"username",
+"expiration",
+]

View File

@@ -1,10 +1,12 @@
from unittest.mock import patch
from django.test import override_settings
+from model_bakery import baker, seq
-from accounts.models import User
+from accounts.models import User, APIKey
from tacticalrmm.test import TacticalTestCase
+from accounts.serializers import APIKeySerializer
class TestAccounts(TacticalTestCase):
def setUp(self):
@@ -39,6 +41,12 @@ class TestAccounts(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "ok")
+# test user set to block dashboard logins
+self.bob.block_dashboard_login = True
+self.bob.save()
+r = self.client.post(url, data, format="json")
+self.assertEqual(r.status_code, 400)
@patch("pyotp.TOTP.verify")
def test_login_view(self, mock_verify):
url = "/login/"
@@ -288,6 +296,68 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("patch", url)
+class TestAPIKeyViews(TacticalTestCase):
+def setUp(self):
+self.setup_coresettings()
+self.authenticate()
+def test_get_api_keys(self):
+url = "/accounts/apikeys/"
+apikeys = baker.make("accounts.APIKey", key=seq("APIKEY"), _quantity=3)
+serializer = APIKeySerializer(apikeys, many=True)
+resp = self.client.get(url, format="json")
+self.assertEqual(resp.status_code, 200)
+self.assertEqual(serializer.data, resp.data) # type: ignore
+self.check_not_authenticated("get", url)
+def test_add_api_keys(self):
+url = "/accounts/apikeys/"
+user = baker.make("accounts.User")
+data = {"name": "Name", "user": user.id, "expiration": None}
+resp = self.client.post(url, data, format="json")
+self.assertEqual(resp.status_code, 200)
+self.assertTrue(APIKey.objects.filter(name="Name").exists())
+self.assertTrue(APIKey.objects.get(name="Name").key)
+self.check_not_authenticated("post", url)
+def test_modify_api_key(self):
+# test a call where api key doesn't exist
+resp = self.client.put("/accounts/apikeys/500/", format="json")
+self.assertEqual(resp.status_code, 404)
+apikey = baker.make("accounts.APIKey", name="Test")
+url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
+data = {"name": "New Name"} # type: ignore
+resp = self.client.put(url, data, format="json")
+self.assertEqual(resp.status_code, 200)
+apikey = APIKey.objects.get(pk=apikey.pk) # type: ignore
+self.assertEquals(apikey.name, "New Name")
+self.check_not_authenticated("put", url)
+def test_delete_api_key(self):
+# test a call where api key doesn't exist
+resp = self.client.delete("/accounts/apikeys/500/", format="json")
+self.assertEqual(resp.status_code, 404)
+# test delete api key
+apikey = baker.make("accounts.APIKey")
+url = f"/accounts/apikeys/{apikey.pk}/" # type: ignore
+resp = self.client.delete(url, format="json")
+self.assertEqual(resp.status_code, 200)
+self.assertFalse(APIKey.objects.filter(pk=apikey.pk).exists()) # type: ignore
+self.check_not_authenticated("delete", url)
class TestTOTPSetup(TacticalTestCase):
def setUp(self):
self.authenticate()
@@ -313,3 +383,29 @@ class TestTOTPSetup(TacticalTestCase):
r = self.client.post(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, "totp token already set")
+class TestAPIAuthentication(TacticalTestCase):
+def setUp(self):
+# create User and associate to API Key
+self.user = User.objects.create(username="api_user", is_superuser=True)
+self.api_key = APIKey.objects.create(
+name="Test Token", key="123456", user=self.user
+)
+self.client_setup()
+def test_api_auth(self):
+url = "/clients/clients/"
+# auth should fail if no header set
+self.check_not_authenticated("get", url)
+# invalid api key in header should return code 400
+self.client.credentials(HTTP_X_API_KEY="000000")
+r = self.client.get(url, format="json")
+self.assertEqual(r.status_code, 401)
+# valid api key in header should return code 200
+self.client.credentials(HTTP_X_API_KEY="123456")
+r = self.client.get(url, format="json")
+self.assertEqual(r.status_code, 200)
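TestAPIAuthentication above exercises the new X-API-KEY header (401 for an unknown key, 200 for a valid one), but the authentication backend itself is not part of this diff. A minimal sketch of how such a DRF authentication class could look; the class name, module location, and expiration handling are assumptions:

from django.utils import timezone as djangotime
from rest_framework.authentication import BaseAuthentication
from rest_framework.exceptions import AuthenticationFailed

from accounts.models import APIKey


class APIKeyAuthentication(BaseAuthentication):  # hypothetical name/location
    def authenticate(self, request):
        key = request.META.get("HTTP_X_API_KEY")
        if not key:
            return None  # fall through to the next configured authentication class

        try:
            apikey = APIKey.objects.select_related("user").get(key=key)
        except APIKey.DoesNotExist:
            raise AuthenticationFailed("bad api key")

        # reject expired keys (expiration is nullable on the model shown above)
        if apikey.expiration and apikey.expiration < djangotime.now():
            raise AuthenticationFailed("api key expired")

        return (apikey.user, None)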

View File

@@ -12,4 +12,6 @@ urlpatterns = [
path("permslist/", views.PermsList.as_view()),
path("roles/", views.GetAddRoles.as_view()),
path("<int:pk>/role/", views.GetUpdateDeleteRole.as_view()),
+path("apikeys/", views.GetAddAPIKeys.as_view()),
+path("apikeys/<int:pk>/", views.GetUpdateDeleteAPIKey.as_view()),
]

View File

@@ -3,23 +3,24 @@ from django.conf import settings
from django.contrib.auth import login
from django.db import IntegrityError
from django.shortcuts import get_object_or_404
+from ipware import get_client_ip
from knox.views import LoginView as KnoxLoginView
+from logs.models import AuditLog
from rest_framework import status
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
-from logs.models import AuditLog
from tacticalrmm.utils import notify_error
-from .models import User, Role
+from .models import APIKey, Role, User
-from .permissions import AccountsPerms, RolesPerms
+from .permissions import APIKeyPerms, AccountsPerms, RolesPerms
from .serializers import (
+APIKeySerializer,
+RoleSerializer,
TOTPSetupSerializer,
UserSerializer,
UserUISerializer,
-RoleSerializer,
)
@@ -40,11 +41,16 @@ class CheckCreds(KnoxLoginView):
# check credentials
serializer = AuthTokenSerializer(data=request.data)
if not serializer.is_valid():
-AuditLog.audit_user_failed_login(request.data["username"])
+AuditLog.audit_user_failed_login(
+request.data["username"], debug_info={"ip": request._client_ip}
+)
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
user = serializer.validated_data["user"]
+if user.block_dashboard_login:
+return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
# if totp token not set modify response to notify frontend
if not user.totp_key:
login(request, user)
@@ -66,6 +72,9 @@ class LoginView(KnoxLoginView):
serializer.is_valid(raise_exception=True)
user = serializer.validated_data["user"]
+if user.block_dashboard_login:
+return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
token = request.data["twofactor"]
totp = pyotp.TOTP(user.totp_key)
@@ -76,10 +85,20 @@ class LoginView(KnoxLoginView):
if valid:
login(request, user)
-AuditLog.audit_user_login_successful(request.data["username"])
+# save ip information
+client_ip, is_routable = get_client_ip(request)
+user.last_login_ip = client_ip
+user.save()
+AuditLog.audit_user_login_successful(
+request.data["username"], debug_info={"ip": request._client_ip}
+)
return super(LoginView, self).post(request, format=None)
else:
-AuditLog.audit_user_failed_twofactor(request.data["username"])
+AuditLog.audit_user_failed_twofactor(
+request.data["username"], debug_info={"ip": request._client_ip}
+)
return Response("bad credentials", status=status.HTTP_400_BAD_REQUEST)
@@ -87,7 +106,14 @@ class GetAddUsers(APIView):
permission_classes = [IsAuthenticated, AccountsPerms]
def get(self, request):
-users = User.objects.filter(agent=None, is_installer_user=False)
+search = request.GET.get("search", None)
+if search:
+users = User.objects.filter(agent=None, is_installer_user=False).filter(
+username__icontains=search
+)
+else:
+users = User.objects.filter(agent=None, is_installer_user=False)
return Response(UserSerializer(users, many=True).data)
@@ -104,8 +130,10 @@ class GetAddUsers(APIView):
f"ERROR: User {request.data['username']} already exists!"
)
-user.first_name = request.data["first_name"]
-user.last_name = request.data["last_name"]
+if "first_name" in request.data.keys():
+user.first_name = request.data["first_name"]
+if "last_name" in request.data.keys():
+user.last_name = request.data["last_name"]
if "role" in request.data.keys() and isinstance(request.data["role"], int):
role = get_object_or_404(Role, pk=request.data["role"])
user.role = role
@@ -233,3 +261,48 @@ class GetUpdateDeleteRole(APIView):
role = get_object_or_404(Role, pk=pk)
role.delete()
return Response("ok")
+class GetAddAPIKeys(APIView):
+permission_classes = [IsAuthenticated, APIKeyPerms]
+def get(self, request):
+apikeys = APIKey.objects.all()
+return Response(APIKeySerializer(apikeys, many=True).data)
+def post(self, request):
+# generate a random API Key
+# https://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits/23728630#23728630
+import random
+import string
+request.data["key"] = "".join(
+random.SystemRandom().choice(string.ascii_uppercase + string.digits)
+for _ in range(32)
+)
+serializer = APIKeySerializer(data=request.data)
+serializer.is_valid(raise_exception=True)
+obj = serializer.save()
+return Response("The API Key was added")
+class GetUpdateDeleteAPIKey(APIView):
+permission_classes = [IsAuthenticated, APIKeyPerms]
+def put(self, request, pk):
+apikey = get_object_or_404(APIKey, pk=pk)
+# remove API key is present in request data
+if "key" in request.data.keys():
+request.data.pop("key")
+serializer = APIKeySerializer(instance=apikey, data=request.data, partial=True)
+serializer.is_valid(raise_exception=True)
+serializer.save()
+return Response("The API Key was edited")
+def delete(self, request, pk):
+apikey = get_object_or_404(APIKey, pk=pk)
+apikey.delete()
+return Response("The API Key was deleted")

View File

@@ -1,8 +1,9 @@
from django.contrib import admin
-from .models import Agent, AgentCustomField, Note, RecoveryAction
+from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
admin.site.register(Agent)
admin.site.register(RecoveryAction)
admin.site.register(Note)
admin.site.register(AgentCustomField)
+admin.site.register(AgentHistory)

View File

@@ -0,0 +1,27 @@
# Generated by Django 3.2.1 on 2021-07-06 02:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('agents', '0037_auto_20210627_0014'),
]
operations = [
migrations.CreateModel(
name='AgentHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.DateTimeField(auto_now_add=True)),
('type', models.CharField(choices=[('task_run', 'Task Run'), ('script_run', 'Script Run'), ('cmd_run', 'CMD Run')], default='cmd_run', max_length=50)),
('command', models.TextField(blank=True, null=True)),
('status', models.CharField(choices=[('success', 'Success'), ('failure', 'Failure')], default='success', max_length=50)),
('username', models.CharField(default='system', max_length=50)),
('results', models.TextField(blank=True, null=True)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to='agents.agent')),
],
),
]

View File

@@ -0,0 +1,25 @@
# Generated by Django 3.2.5 on 2021-07-14 07:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('scripts', '0008_script_guid'),
('agents', '0038_agenthistory'),
]
operations = [
migrations.AddField(
model_name='agenthistory',
name='script',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='history', to='scripts.script'),
),
migrations.AddField(
model_name='agenthistory',
name='script_results',
field=models.JSONField(blank=True, null=True),
),
]

View File

@@ -16,14 +16,12 @@ from django.conf import settings
from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields import ArrayField
from django.db import models from django.db import models
from django.utils import timezone as djangotime from django.utils import timezone as djangotime
from loguru import logger
from nats.aio.client import Client as NATS from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout from nats.aio.errors import ErrTimeout
from packaging import version as pyver
from core.models import TZ_CHOICES, CoreSettings from core.models import TZ_CHOICES, CoreSettings
from logs.models import BaseAuditModel from logs.models import BaseAuditModel, DebugLog
logger.configure(**settings.LOG_CONFIG)
class Agent(BaseAuditModel): class Agent(BaseAuditModel):
@@ -89,10 +87,11 @@ class Agent(BaseAuditModel):
) )
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
from automation.tasks import generate_agent_checks_task
# get old agent if exists # get old agent if exists
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None old_agent = Agent.objects.get(pk=self.pk) if self.pk else None
super(BaseAuditModel, self).save(*args, **kwargs) super(Agent, self).save(old_model=old_agent, *args, **kwargs)
# check if new agent has been created # check if new agent has been created
# or check if policy have changed on agent # or check if policy have changed on agent
@@ -105,8 +104,11 @@ class Agent(BaseAuditModel):
or (old_agent.monitoring_type != self.monitoring_type) or (old_agent.monitoring_type != self.monitoring_type)
or (old_agent.block_policy_inheritance != self.block_policy_inheritance) or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
): ):
self.generate_checks_from_policies() generate_agent_checks_task.delay(agents=[self.pk], create_tasks=True)
self.generate_tasks_from_policies()
# calculate alert template for new agents
if not old_agent:
self.set_alert_template()
def __str__(self): def __str__(self):
return self.hostname return self.hostname
@@ -123,7 +125,7 @@ class Agent(BaseAuditModel):
else: else:
from core.models import CoreSettings from core.models import CoreSettings
return CoreSettings.objects.first().default_time_zone return CoreSettings.objects.first().default_time_zone # type: ignore
@property @property
def arch(self): def arch(self):
@@ -325,6 +327,7 @@ class Agent(BaseAuditModel):
full: bool = False, full: bool = False,
wait: bool = False, wait: bool = False,
run_on_any: bool = False, run_on_any: bool = False,
history_pk: int = 0,
) -> Any: ) -> Any:
from scripts.models import Script from scripts.models import Script
@@ -343,6 +346,9 @@ class Agent(BaseAuditModel):
}, },
} }
if history_pk != 0 and pyver.parse(self.version) >= pyver.parse("1.6.0"):
data["id"] = history_pk
running_agent = self running_agent = self
if run_on_any: if run_on_any:
nats_ping = {"func": "ping"} nats_ping = {"func": "ping"}
@@ -411,6 +417,13 @@ class Agent(BaseAuditModel):
update.action = "approve" update.action = "approve"
update.save(update_fields=["action"]) update.save(update_fields=["action"])
if updates:
DebugLog.info(
agent=self,
log_type="windows_updates",
message=f"Approving windows updates on {self.hostname}",
)
# returns agent policy merged with a client or site specific policy # returns agent policy merged with a client or site specific policy
def get_patch_policy(self): def get_patch_policy(self):
@@ -445,8 +458,8 @@ class Agent(BaseAuditModel):
# if patch policy still doesn't exist check default policy # if patch policy still doesn't exist check default policy
elif ( elif (
core_settings.server_policy core_settings.server_policy # type: ignore
and core_settings.server_policy.winupdatepolicy.exists() and core_settings.server_policy.winupdatepolicy.exists() # type: ignore
): ):
# make sure agent site and client are not blocking inheritance # make sure agent site and client are not blocking inheritance
if ( if (
@@ -454,7 +467,7 @@ class Agent(BaseAuditModel):
and not site.block_policy_inheritance and not site.block_policy_inheritance
and not site.client.block_policy_inheritance and not site.client.block_policy_inheritance
): ):
patch_policy = core_settings.server_policy.winupdatepolicy.get() patch_policy = core_settings.server_policy.winupdatepolicy.get() # type: ignore
elif self.monitoring_type == "workstation": elif self.monitoring_type == "workstation":
# check agent policy first which should override client or site policy # check agent policy first which should override client or site policy
@@ -483,8 +496,8 @@ class Agent(BaseAuditModel):
# if patch policy still doesn't exist check default policy # if patch policy still doesn't exist check default policy
elif ( elif (
core_settings.workstation_policy core_settings.workstation_policy # type: ignore
and core_settings.workstation_policy.winupdatepolicy.exists() and core_settings.workstation_policy.winupdatepolicy.exists() # type: ignore
): ):
# make sure agent site and client are not blocking inheritance # make sure agent site and client are not blocking inheritance
if ( if (
@@ -493,7 +506,7 @@ class Agent(BaseAuditModel):
and not site.client.block_policy_inheritance and not site.client.block_policy_inheritance
): ):
patch_policy = ( patch_policy = (
core_settings.workstation_policy.winupdatepolicy.get() core_settings.workstation_policy.winupdatepolicy.get() # type: ignore
) )
# if policy still doesn't exist return the agent patch policy # if policy still doesn't exist return the agent patch policy
@@ -608,35 +621,35 @@ class Agent(BaseAuditModel):
# check if alert template is applied globally and return # check if alert template is applied globally and return
if ( if (
core.alert_template core.alert_template # type: ignore
and core.alert_template.is_active and core.alert_template.is_active # type: ignore
and not self.block_policy_inheritance and not self.block_policy_inheritance
and not site.block_policy_inheritance and not site.block_policy_inheritance
and not client.block_policy_inheritance and not client.block_policy_inheritance
): ):
templates.append(core.alert_template) templates.append(core.alert_template) # type: ignore
# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core # if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
if ( if (
self.monitoring_type == "server" self.monitoring_type == "server"
and core.server_policy and core.server_policy # type: ignore
and core.server_policy.alert_template and core.server_policy.alert_template # type: ignore
and core.server_policy.alert_template.is_active and core.server_policy.alert_template.is_active # type: ignore
and not self.block_policy_inheritance and not self.block_policy_inheritance
and not site.block_policy_inheritance and not site.block_policy_inheritance
and not client.block_policy_inheritance and not client.block_policy_inheritance
): ):
templates.append(core.server_policy.alert_template) templates.append(core.server_policy.alert_template) # type: ignore
if ( if (
self.monitoring_type == "workstation" self.monitoring_type == "workstation"
and core.workstation_policy and core.workstation_policy # type: ignore
and core.workstation_policy.alert_template and core.workstation_policy.alert_template # type: ignore
and core.workstation_policy.alert_template.is_active and core.workstation_policy.alert_template.is_active # type: ignore
and not self.block_policy_inheritance and not self.block_policy_inheritance
and not site.block_policy_inheritance and not site.block_policy_inheritance
and not client.block_policy_inheritance and not client.block_policy_inheritance
): ):
templates.append(core.workstation_policy.alert_template) templates.append(core.workstation_policy.alert_template) # type: ignore
# go through the templates and return the first one that isn't excluded # go through the templates and return the first one that isn't excluded
for template in templates: for template in templates:
@@ -739,7 +752,7 @@ class Agent(BaseAuditModel):
try: try:
ret = msgpack.loads(msg.data) # type: ignore ret = msgpack.loads(msg.data) # type: ignore
except Exception as e: except Exception as e:
logger.error(e) DebugLog.error(agent=self, log_type="agent_issues", message=e)
ret = str(e) ret = str(e)
await nc.close() await nc.close()
@@ -752,12 +765,9 @@ class Agent(BaseAuditModel):
@staticmethod @staticmethod
def serialize(agent): def serialize(agent):
# serializes the agent and returns json # serializes the agent and returns json
from .serializers import AgentEditSerializer from .serializers import AgentAuditSerializer
ret = AgentEditSerializer(agent).data return AgentAuditSerializer(agent).data
del ret["all_timezones"]
del ret["client"]
return ret
def delete_superseded_updates(self): def delete_superseded_updates(self):
try: try:
@@ -772,7 +782,7 @@ class Agent(BaseAuditModel):
# skip if no version info is available therefore nothing to parse # skip if no version info is available therefore nothing to parse
try: try:
vers = [ vers = [
re.search(r"\(Version(.*?)\)", i).group(1).strip() re.search(r"\(Version(.*?)\)", i).group(1).strip() # type: ignore
for i in titles for i in titles
] ]
sorted_vers = sorted(vers, key=LooseVersion) sorted_vers = sorted(vers, key=LooseVersion)
@@ -807,7 +817,7 @@ class Agent(BaseAuditModel):
         from core.models import CoreSettings

         CORE = CoreSettings.objects.first()
-        CORE.send_mail(
+        CORE.send_mail(  # type: ignore
             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
             (
                 f"Data has not been received from client {self.client.name}, "
@@ -822,7 +832,7 @@ class Agent(BaseAuditModel):
         from core.models import CoreSettings

         CORE = CoreSettings.objects.first()
-        CORE.send_mail(
+        CORE.send_mail(  # type: ignore
             f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
             (
                 f"Data has been received from client {self.client.name}, "
@@ -837,7 +847,7 @@ class Agent(BaseAuditModel):
         from core.models import CoreSettings

         CORE = CoreSettings.objects.first()
-        CORE.send_sms(
+        CORE.send_sms(  # type: ignore
             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
             alert_template=self.alert_template,
         )
@@ -846,7 +856,7 @@ class Agent(BaseAuditModel):
         from core.models import CoreSettings

         CORE = CoreSettings.objects.first()
-        CORE.send_sms(
+        CORE.send_sms(  # type: ignore
             f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
             alert_template=self.alert_template,
         )
@@ -928,3 +938,57 @@ class AgentCustomField(models.Model):
             return self.bool_value
         else:
             return self.string_value

+    def save_to_field(self, value):
+        if self.field.type in [
+            "text",
+            "number",
+            "single",
+            "datetime",
+        ]:
+            self.string_value = value
+            self.save()
+        elif self.field.type == "multiple":
+            self.multiple_value = value.split(",")
+            self.save()
+        elif self.field.type == "checkbox":
+            self.bool_value = bool(value)
+            self.save()
+
+
+AGENT_HISTORY_TYPES = (
+    ("task_run", "Task Run"),
+    ("script_run", "Script Run"),
+    ("cmd_run", "CMD Run"),
+)
+
+AGENT_HISTORY_STATUS = (("success", "Success"), ("failure", "Failure"))
+
+
+class AgentHistory(models.Model):
+    agent = models.ForeignKey(
+        Agent,
+        related_name="history",
+        on_delete=models.CASCADE,
+    )
+    time = models.DateTimeField(auto_now_add=True)
+    type = models.CharField(
+        max_length=50, choices=AGENT_HISTORY_TYPES, default="cmd_run"
+    )
+    command = models.TextField(null=True, blank=True)
+    status = models.CharField(
+        max_length=50, choices=AGENT_HISTORY_STATUS, default="success"
+    )
+    username = models.CharField(max_length=50, default="system")
+    results = models.TextField(null=True, blank=True)
+    script = models.ForeignKey(
+        "scripts.Script",
+        null=True,
+        blank=True,
+        related_name="history",
+        on_delete=models.SET_NULL,
+    )
+    script_results = models.JSONField(null=True, blank=True)
+
+    def __str__(self):
+        return f"{self.agent.hostname} - {self.type}"

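A rough usage sketch for the new history model (not part of the changeset; the hostname and command below are invented):

from agents.models import Agent, AgentHistory

# hypothetical Django shell session
agent = Agent.objects.get(hostname="DESKTOP-EXAMPLE")  # assumed to exist
AgentHistory.objects.create(
    agent=agent,
    type="cmd_run",
    command="ipconfig /all",
    username="tactical",
)
# newest entries first for this agent, via related_name="history"
for entry in agent.history.order_by("-time")[:5]:
    print(entry, entry.status)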
View File

@@ -1,10 +1,10 @@
 import pytz
+from rest_framework import serializers
 from clients.serializers import ClientSerializer
-from rest_framework import serializers
from tacticalrmm.utils import get_default_timezone
 from winupdate.serializers import WinUpdatePolicySerializer

-from .models import Agent, AgentCustomField, Note
+from .models import Agent, AgentCustomField, Note, AgentHistory


 class AgentSerializer(serializers.ModelSerializer):
@@ -159,6 +159,7 @@ class AgentEditSerializer(serializers.ModelSerializer):
"offline_time", "offline_time",
"overdue_text_alert", "overdue_text_alert",
"overdue_email_alert", "overdue_email_alert",
"overdue_dashboard_alert",
"all_timezones", "all_timezones",
"winupdatepolicy", "winupdatepolicy",
"policy", "policy",
@@ -200,3 +201,22 @@ class NotesSerializer(serializers.ModelSerializer):
     class Meta:
         model = Agent
         fields = ["hostname", "pk", "notes"]


+class AgentHistorySerializer(serializers.ModelSerializer):
+    time = serializers.SerializerMethodField(read_only=True)
+    script_name = serializers.ReadOnlyField(source="script.name")
+
+    class Meta:
+        model = AgentHistory
+        fields = "__all__"
+
+    def get_time(self, history):
+        tz = self.context["default_tz"]
+        return history.time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")
+
+
+class AgentAuditSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Agent
+        exclude = ["disks", "services", "wmi_detail"]

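A minimal sketch of driving the new serializer directly (illustrative only; the timezone is an arbitrary choice), matching the "default_tz" context key that get_time expects:

import pytz

from agents.models import AgentHistory
from agents.serializers import AgentHistorySerializer

ctx = {"default_tz": pytz.timezone("America/Los_Angeles")}
data = AgentHistorySerializer(
    AgentHistory.objects.all(), many=True, context=ctx
).data
# each item carries script_name plus time formatted as "%m %d %Y %H:%M:%S"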
View File

@@ -1,26 +1,21 @@
 import asyncio
 import datetime as dt
 import random
-import tempfile
-import json
-import subprocess
 import urllib.parse
 from time import sleep
 from typing import Union

+from alerts.models import Alert
+from core.models import CodeSignToken, CoreSettings
 from django.conf import settings
 from django.utils import timezone as djangotime
-from loguru import logger
+from logs.models import DebugLog, PendingAction
 from packaging import version as pyver
-from agents.models import Agent
-from core.models import CodeSignToken, CoreSettings
-from logs.models import PendingAction
 from scripts.models import Script
 from tacticalrmm.celery import app
 from tacticalrmm.utils import run_nats_api_cmd

-logger.configure(**settings.LOG_CONFIG)
+from agents.models import Agent


 def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str:
@@ -33,8 +28,10 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
     # skip if we can't determine the arch
     if agent.arch is None:
-        logger.warning(
-            f"Unable to determine arch on {agent.hostname}. Skipping agent update."
+        DebugLog.warning(
+            agent=agent,
+            log_type="agent_issues",
+            message=f"Unable to determine arch on {agent.hostname}({agent.pk}). Skipping agent update.",
         )
         return "noarch"
@@ -81,7 +78,7 @@ def agent_update(pk: int, codesigntoken: str = None, force: bool = False) -> str
 @app.task
 def force_code_sign(pks: list[int]) -> None:
     try:
-        token = CodeSignToken.objects.first().token
+        token = CodeSignToken.objects.first().token  # type:ignore
     except:
         return
@@ -96,7 +93,7 @@ def force_code_sign(pks: list[int]) -> None:
 @app.task
 def send_agent_update_task(pks: list[int]) -> None:
     try:
-        codesigntoken = CodeSignToken.objects.first().token
+        codesigntoken = CodeSignToken.objects.first().token  # type:ignore
     except:
         codesigntoken = None
@@ -111,11 +108,11 @@ def send_agent_update_task(pks: list[int]) -> None:
 @app.task
 def auto_self_agent_update_task() -> None:
     core = CoreSettings.objects.first()
-    if not core.agent_auto_update:
+    if not core.agent_auto_update:  # type:ignore
         return

     try:
-        codesigntoken = CodeSignToken.objects.first().token
+        codesigntoken = CodeSignToken.objects.first().token  # type:ignore
     except:
         codesigntoken = None
@@ -235,14 +232,24 @@ def run_script_email_results_task(
     nats_timeout: int,
     emails: list[str],
     args: list[str] = [],
+    history_pk: int = 0,
 ):
     agent = Agent.objects.get(pk=agentpk)
     script = Script.objects.get(pk=scriptpk)

     r = agent.run_script(
-        scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True
+        scriptpk=script.pk,
+        args=args,
+        full=True,
+        timeout=nats_timeout,
+        wait=True,
+        history_pk=history_pk,
     )

     if r == "timeout":
-        logger.error(f"{agent.hostname} timed out running script.")
+        DebugLog.error(
+            agent=agent,
+            log_type="scripting",
+            message=f"{agent.hostname}({agent.pk}) timed out running script.",
+        )
         return

     CORE = CoreSettings.objects.first()
@@ -258,28 +265,32 @@ def run_script_email_results_task(
     msg = EmailMessage()
     msg["Subject"] = subject
-    msg["From"] = CORE.smtp_from_email
+    msg["From"] = CORE.smtp_from_email  # type:ignore

     if emails:
         msg["To"] = ", ".join(emails)
     else:
-        msg["To"] = ", ".join(CORE.email_alert_recipients)
+        msg["To"] = ", ".join(CORE.email_alert_recipients)  # type:ignore

     msg.set_content(body)

     try:
-        with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
-            if CORE.smtp_requires_auth:
+        with smtplib.SMTP(
+            CORE.smtp_host, CORE.smtp_port, timeout=20  # type:ignore
+        ) as server:  # type:ignore
+            if CORE.smtp_requires_auth:  # type:ignore
                 server.ehlo()
                 server.starttls()
-                server.login(CORE.smtp_host_user, CORE.smtp_host_password)
+                server.login(
+                    CORE.smtp_host_user, CORE.smtp_host_password  # type:ignore
+                )  # type:ignore
                 server.send_message(msg)
                 server.quit()
             else:
                 server.send_message(msg)
                 server.quit()
     except Exception as e:
-        logger.error(e)
+        DebugLog.error(message=e)


 @app.task
@@ -310,15 +321,6 @@ def clear_faults_task(older_than_days: int) -> None:
     )


-@app.task
-def monitor_agents_task() -> None:
-    agents = Agent.objects.only(
-        "pk", "agent_id", "last_seen", "overdue_time", "offline_time"
-    )
-    ids = [i.agent_id for i in agents if i.status != "online"]
-    run_nats_api_cmd("monitor", ids)


 @app.task
 def get_wmi_task() -> None:
     agents = Agent.objects.only(
@@ -330,18 +332,62 @@ def get_wmi_task() -> None:
 @app.task
 def agent_checkin_task() -> None:
-    db = settings.DATABASES["default"]
-    config = {
-        "key": settings.SECRET_KEY,
-        "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
-        "user": db["USER"],
-        "pass": db["PASSWORD"],
-        "host": db["HOST"],
-        "port": int(db["PORT"]),
-        "dbname": db["NAME"],
-    }
-    with tempfile.NamedTemporaryFile() as fp:
-        with open(fp.name, "w") as f:
-            json.dump(config, f)
-        cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", "checkin"]
-        subprocess.run(cmd, timeout=30)
+    run_nats_api_cmd("checkin", timeout=30)


+@app.task
+def agent_getinfo_task() -> None:
+    run_nats_api_cmd("agentinfo", timeout=30)


+@app.task
+def prune_agent_history(older_than_days: int) -> str:
+    from .models import AgentHistory
+
+    AgentHistory.objects.filter(
+        time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
+    ).delete()
+
+    return "ok"


+@app.task
+def handle_agents_task() -> None:
+    q = Agent.objects.prefetch_related("pendingactions", "autotasks").only(
+        "pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
+    )
+    agents = [
+        i
+        for i in q
+        if pyver.parse(i.version) >= pyver.parse("1.6.0") and i.status == "online"
+    ]
+    for agent in agents:
+        # change agent update pending status to completed if agent has just updated
+        if (
+            pyver.parse(agent.version) == pyver.parse(settings.LATEST_AGENT_VER)
+            and agent.pendingactions.filter(
+                action_type="agentupdate", status="pending"
+            ).exists()
+        ):
+            agent.pendingactions.filter(
+                action_type="agentupdate", status="pending"
+            ).update(status="completed")
+
+        # sync scheduled tasks
+        if agent.autotasks.exclude(sync_status="synced").exists():  # type: ignore
+            tasks = agent.autotasks.exclude(sync_status="synced")  # type: ignore
+
+            for task in tasks:
+                if task.sync_status == "pendingdeletion":
+                    task.delete_task_on_agent()
+                elif task.sync_status == "initial":
+                    task.modify_task_on_agent()
+                elif task.sync_status == "notsynced":
+                    task.create_task_on_agent()
+
+        # handles any alerting actions
+        if Alert.objects.filter(agent=agent, resolved=False).exists():
+            try:
+                Alert.handle_alert_resolve(agent)
+            except:
+                continue

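The new check-in, info and pruning jobs above are plain Celery tasks, so the schedule lives in the project's Celery config rather than in this file. A hedged sketch of what wiring them up could look like (the entry names, intervals and 30-day retention are assumptions, not values from the repo; app is assumed to be the instance from tacticalrmm.celery):

from celery.schedules import crontab

# hypothetical additions to the Celery app's beat schedule
app.conf.beat_schedule = {
    "agent-checkin": {
        "task": "agents.tasks.agent_checkin_task",
        "schedule": 30.0,  # run every 30 seconds
    },
    "prune-agent-history": {
        "task": "agents.tasks.prune_agent_history",
        "schedule": crontab(hour=2, minute=0),  # nightly cleanup
        "args": (30,),  # delete AgentHistory rows older than 30 days
    },
}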
View File

@@ -1,19 +1,19 @@
 import json
 import os
-from itertools import cycle
+import pytz
+from django.utils import timezone as djangotime
 from unittest.mock import patch

 from django.conf import settings
-from logs.models import PendingAction
 from model_bakery import baker
 from packaging import version as pyver
+from logs.models import PendingAction
 from tacticalrmm.test import TacticalTestCase
 from winupdate.models import WinUpdatePolicy
 from winupdate.serializers import WinUpdatePolicySerializer

-from .models import Agent, AgentCustomField
+from .models import Agent, AgentCustomField, AgentHistory
-from .serializers import AgentSerializer
+from .serializers import AgentHistorySerializer, AgentSerializer
 from .tasks import auto_self_agent_update_task
@@ -306,7 +306,7 @@ class TestAgentViews(TacticalTestCase):
"shell": "cmd", "shell": "cmd",
"timeout": 30, "timeout": 30,
} }
mock_ret.return_value = "nt authority\system" mock_ret.return_value = "nt authority\\system"
r = self.client.post(url, data, format="json") r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200) self.assertEqual(r.status_code, 200)
self.assertIsInstance(r.data, str) # type: ignore self.assertIsInstance(r.data, str) # type: ignore
@@ -437,7 +437,7 @@ class TestAgentViews(TacticalTestCase):
         self.assertEqual(r.status_code, 200)
         self.assertEqual(RecoveryAction.objects.count(), 1)
         mesh_recovery = RecoveryAction.objects.first()
-        self.assertEqual(mesh_recovery.mode, "mesh")
+        self.assertEqual(mesh_recovery.mode, "mesh")  # type: ignore
         nats_cmd.reset_mock()
         RecoveryAction.objects.all().delete()
@@ -472,8 +472,8 @@ class TestAgentViews(TacticalTestCase):
         self.assertEqual(r.status_code, 200)
         self.assertEqual(RecoveryAction.objects.count(), 1)
         cmd_recovery = RecoveryAction.objects.first()
-        self.assertEqual(cmd_recovery.mode, "command")
-        self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")
+        self.assertEqual(cmd_recovery.mode, "command")  # type: ignore
+        self.assertEqual(cmd_recovery.command, "shutdown /r /t 10 /f")  # type: ignore

     def test_agents_agent_detail(self):
         url = f"/agents/{self.agent.pk}/agentdetail/"
@@ -770,6 +770,9 @@ class TestAgentViews(TacticalTestCase):
@patch("agents.tasks.run_script_email_results_task.delay") @patch("agents.tasks.run_script_email_results_task.delay")
@patch("agents.models.Agent.run_script") @patch("agents.models.Agent.run_script")
def test_run_script(self, run_script, email_task): def test_run_script(self, run_script, email_task):
from .models import AgentCustomField, Note
from clients.models import ClientCustomField, SiteCustomField
run_script.return_value = "ok" run_script.return_value = "ok"
url = "/agents/runscript/" url = "/agents/runscript/"
script = baker.make_recipe("scripts.script") script = baker.make_recipe("scripts.script")
@@ -777,7 +780,7 @@ class TestAgentViews(TacticalTestCase):
         # test wait
         data = {
             "pk": self.agent.pk,
-            "scriptPK": script.pk,
+            "script": script.pk,
             "output": "wait",
             "args": [],
             "timeout": 15,
@@ -786,18 +789,18 @@ class TestAgentViews(TacticalTestCase):
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
         run_script.assert_called_with(
-            scriptpk=script.pk, args=[], timeout=18, wait=True
+            scriptpk=script.pk, args=[], timeout=18, wait=True, history_pk=0
         )
         run_script.reset_mock()

         # test email default
         data = {
             "pk": self.agent.pk,
-            "scriptPK": script.pk,
+            "script": script.pk,
             "output": "email",
             "args": ["abc", "123"],
             "timeout": 15,
-            "emailmode": "default",
+            "emailMode": "default",
             "emails": ["admin@example.com", "bob@example.com"],
         }
         r = self.client.post(url, data, format="json")
@@ -812,7 +815,7 @@ class TestAgentViews(TacticalTestCase):
         email_task.reset_mock()

         # test email overrides
-        data["emailmode"] = "custom"
+        data["emailMode"] = "custom"
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
         email_task.assert_called_with(
@@ -826,7 +829,7 @@ class TestAgentViews(TacticalTestCase):
         # test fire and forget
         data = {
             "pk": self.agent.pk,
-            "scriptPK": script.pk,
+            "script": script.pk,
             "output": "forget",
             "args": ["hello", "world"],
             "timeout": 22,
@@ -835,8 +838,139 @@ class TestAgentViews(TacticalTestCase):
         r = self.client.post(url, data, format="json")
         self.assertEqual(r.status_code, 200)
         run_script.assert_called_with(
-            scriptpk=script.pk, args=["hello", "world"], timeout=25
+            scriptpk=script.pk, args=["hello", "world"], timeout=25, history_pk=0
         )
run_script.reset_mock()
# test collector
# save to agent custom field
custom_field = baker.make("core.CustomField", model="agent")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"save_all_output": True,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(
AgentCustomField.objects.get(agent=self.agent.pk, field=custom_field).value,
"ok",
)
# save to site custom field
custom_field = baker.make("core.CustomField", model="site")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"save_all_output": False,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(
SiteCustomField.objects.get(
site=self.agent.site.pk, field=custom_field
).value,
"ok",
)
# save to client custom field
custom_field = baker.make("core.CustomField", model="client")
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "collector",
"args": ["hello", "world"],
"timeout": 22,
"custom_field": custom_field.id, # type: ignore
"save_all_output": False,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(
ClientCustomField.objects.get(
client=self.agent.client.pk, field=custom_field
).value,
"ok",
)
# test save to note
data = {
"pk": self.agent.pk,
"script": script.pk,
"output": "note",
"args": ["hello", "world"],
"timeout": 22,
}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
run_script.assert_called_with(
scriptpk=script.pk,
args=["hello", "world"],
timeout=25,
wait=True,
history_pk=0,
)
run_script.reset_mock()
self.assertEqual(Note.objects.get(agent=self.agent).note, "ok")
def test_get_agent_history(self):
# setup data
agent = baker.make_recipe("agents.agent")
history = baker.make("agents.AgentHistory", agent=agent, _quantity=30)
url = f"/agents/history/{agent.id}/"
# test agent not found
r = self.client.get("/agents/history/500/", format="json")
self.assertEqual(r.status_code, 404)
# test pulling data
r = self.client.get(url, format="json")
ctx = {"default_tz": pytz.timezone("America/Los_Angeles")}
data = AgentHistorySerializer(history, many=True, context=ctx).data
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, data) # type:ignore
class TestAgentViewsNew(TacticalTestCase):
@@ -1048,3 +1182,25 @@ class TestAgentTasks(TacticalTestCase):
        r = auto_self_agent_update_task.s().apply()
        self.assertEqual(agent_update.call_count, 33)
def test_agent_history_prune_task(self):
from .tasks import prune_agent_history
# setup data
agent = baker.make_recipe("agents.agent")
history = baker.make(
"agents.AgentHistory",
agent=agent,
_quantity=50,
)
days = 0
for item in history: # type: ignore
item.time = djangotime.now() - djangotime.timedelta(days=days)
item.save()
days = days + 5
# delete AgentHistory older than 30 days
prune_agent_history(30)
self.assertEqual(AgentHistory.objects.filter(agent=agent).count(), 6)

View File

@@ -29,4 +29,5 @@ urlpatterns = [
path("bulk/", views.bulk), path("bulk/", views.bulk),
path("maintenance/", views.agent_maintenance), path("maintenance/", views.agent_maintenance),
path("<int:pk>/wmi/", views.WMI.as_view()), path("<int:pk>/wmi/", views.WMI.as_view()),
path("history/<int:pk>/", views.AgentHistoryView.as_view()),
] ]

View File

@@ -8,7 +8,6 @@ import time
 from django.conf import settings
 from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
-from loguru import logger
 from packaging import version as pyver
 from rest_framework import status
 from rest_framework.decorators import api_view, permission_classes
@@ -17,14 +16,14 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from core.models import CoreSettings
-from logs.models import AuditLog, PendingAction
+from logs.models import AuditLog, DebugLog, PendingAction
 from scripts.models import Script
 from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
 from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
 from winupdate.serializers import WinUpdatePolicySerializer
 from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task

-from .models import Agent, AgentCustomField, Note, RecoveryAction
+from .models import Agent, AgentCustomField, Note, RecoveryAction, AgentHistory
 from .permissions import (
     EditAgentPerms,
     EvtLogPerms,
@@ -42,6 +41,7 @@ from .permissions import (
 from .serializers import (
     AgentCustomFieldSerializer,
     AgentEditSerializer,
+    AgentHistorySerializer,
     AgentHostnameSerializer,
     AgentOverdueActionSerializer,
     AgentSerializer,
@@ -51,8 +51,6 @@ from .serializers import (
 )
 from .tasks import run_script_email_results_task, send_agent_update_task

-logger.configure(**settings.LOG_CONFIG)


 @api_view()
 def get_agent_versions(request):
@@ -115,7 +113,7 @@ def uninstall(request):
 def edit_agent(request):
     agent = get_object_or_404(Agent, pk=request.data["id"])

-    a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
+    a_serializer = AgentEditSerializer(instance=agent, data=request.data, partial=True)
     a_serializer.is_valid(raise_exception=True)
     a_serializer.save()
@@ -160,17 +158,21 @@ def meshcentral(request, pk):
     core = CoreSettings.objects.first()

     token = agent.get_login_token(
-        key=core.mesh_token, user=f"user//{core.mesh_username}"
+        key=core.mesh_token, user=f"user//{core.mesh_username}"  # type:ignore
     )

     if token == "err":
         return notify_error("Invalid mesh token")

-    control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
-    terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
-    file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
+    control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"  # type:ignore
+    terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"  # type:ignore
+    file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"  # type:ignore

-    AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
+    AuditLog.audit_mesh_session(
+        username=request.user.username,
+        agent=agent,
+        debug_info={"ip": request._client_ip},
+    )

     ret = {
         "hostname": agent.hostname,
@@ -248,6 +250,16 @@ def send_raw_cmd(request):
"shell": request.data["shell"], "shell": request.data["shell"],
}, },
} }
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
hist = AgentHistory.objects.create(
agent=agent,
type="cmd_run",
command=request.data["cmd"],
username=request.user.username[:50],
)
data["id"] = hist.pk
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2)) r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
if r == "timeout": if r == "timeout":
@@ -255,9 +267,10 @@ def send_raw_cmd(request):
     AuditLog.audit_raw_command(
         username=request.user.username,
-        hostname=agent.hostname,
+        agent=agent,
         cmd=request.data["cmd"],
         shell=request.data["shell"],
+        debug_info={"ip": request._client_ip},
     )

     return Response(r)
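For context, once an agent is at 1.6.0 or newer the dict sent over NATS ends up shaped roughly like the sketch below; the func name and the literal values are placeholders rather than output from a real run:

data = {
    "func": "rawcmd",  # assumed; the key is set earlier in this view, outside the hunk
    "timeout": 30,
    "payload": {
        "command": "ipconfig /all",
        "shell": "cmd",
    },
    "id": 42,  # AgentHistory pk, only present for agents >= 1.6.0
}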
@@ -508,7 +521,7 @@ def install_agent(request):
         try:
             os.remove(ps1)
         except Exception as e:
-            logger.error(str(e))
+            DebugLog.error(message=str(e))

         with open(ps1, "w") as f:
             f.write(text)
@@ -566,26 +579,41 @@ def recover(request):
 @permission_classes([IsAuthenticated, RunScriptPerms])
 def run_script(request):
     agent = get_object_or_404(Agent, pk=request.data["pk"])
-    script = get_object_or_404(Script, pk=request.data["scriptPK"])
+    script = get_object_or_404(Script, pk=request.data["script"])
     output = request.data["output"]
     args = request.data["args"]
     req_timeout = int(request.data["timeout"]) + 3

     AuditLog.audit_script_run(
         username=request.user.username,
-        hostname=agent.hostname,
+        agent=agent,
         script=script.name,
+        debug_info={"ip": request._client_ip},
     )

+    history_pk = 0
+    if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
+        hist = AgentHistory.objects.create(
+            agent=agent,
+            type="script_run",
+            script=script,
+            username=request.user.username[:50],
+        )
+        history_pk = hist.pk

     if output == "wait":
         r = agent.run_script(
-            scriptpk=script.pk, args=args, timeout=req_timeout, wait=True
+            scriptpk=script.pk,
+            args=args,
+            timeout=req_timeout,
+            wait=True,
+            history_pk=history_pk,
         )
         return Response(r)

     elif output == "email":
         emails = (
-            [] if request.data["emailmode"] == "default" else request.data["emails"]
+            [] if request.data["emailMode"] == "default" else request.data["emails"]
         )
         run_script_email_results_task.delay(
             agentpk=agent.pk,
@@ -594,8 +622,51 @@ def run_script(request):
             emails=emails,
             args=args,
         )
+    elif output == "collector":
+        from core.models import CustomField
+
+        r = agent.run_script(
+            scriptpk=script.pk,
+            args=args,
+            timeout=req_timeout,
+            wait=True,
+            history_pk=history_pk,
+        )
+
+        custom_field = CustomField.objects.get(pk=request.data["custom_field"])
+
+        if custom_field.model == "agent":
+            field = custom_field.get_or_create_field_value(agent)
+        elif custom_field.model == "client":
+            field = custom_field.get_or_create_field_value(agent.client)
+        elif custom_field.model == "site":
+            field = custom_field.get_or_create_field_value(agent.site)
+        else:
+            return notify_error("Custom Field was invalid")
+
+        value = (
+            r.strip()
+            if request.data["save_all_output"]
+            else r.strip().split("\n")[-1].strip()
+        )
+
+        field.save_to_field(value)
+        return Response(r)
+    elif output == "note":
+        r = agent.run_script(
+            scriptpk=script.pk,
+            args=args,
+            timeout=req_timeout,
+            wait=True,
+            history_pk=history_pk,
+        )
+
+        Note.objects.create(agent=agent, user=request.user, note=r)
+        return Response(r)
     else:
-        agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout)
+        agent.run_script(
+            scriptpk=script.pk, args=args, timeout=req_timeout, history_pk=history_pk
+        )

     return Response(f"{script.name} will now be run on {agent.hostname}")
@@ -668,7 +739,7 @@ class GetEditDeleteNote(APIView):
@api_view(["POST"]) @api_view(["POST"])
@permission_classes([IsAuthenticated, RunBulkPerms]) @permission_classes([IsAuthenticated, RunBulkPerms])
def bulk(request): def bulk(request):
if request.data["target"] == "agents" and not request.data["agentPKs"]: if request.data["target"] == "agents" and not request.data["agents"]:
return notify_error("Must select at least 1 agent") return notify_error("Must select at least 1 agent")
if request.data["target"] == "client": if request.data["target"] == "client":
@@ -676,7 +747,7 @@ def bulk(request):
elif request.data["target"] == "site": elif request.data["target"] == "site":
q = Agent.objects.filter(site_id=request.data["site"]) q = Agent.objects.filter(site_id=request.data["site"])
elif request.data["target"] == "agents": elif request.data["target"] == "agents":
q = Agent.objects.filter(pk__in=request.data["agentPKs"]) q = Agent.objects.filter(pk__in=request.data["agents"])
elif request.data["target"] == "all": elif request.data["target"] == "all":
q = Agent.objects.only("pk", "monitoring_type") q = Agent.objects.only("pk", "monitoring_type")
else: else:
@@ -689,29 +760,48 @@ def bulk(request):
     agents: list[int] = [agent.pk for agent in q]

-    AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
+    if not agents:
+        return notify_error("No agents where found meeting the selected criteria")
+
+    AuditLog.audit_bulk_action(
+        request.user,
+        request.data["mode"],
+        request.data,
+        debug_info={"ip": request._client_ip},
+    )

     if request.data["mode"] == "command":
         handle_bulk_command_task.delay(
-            agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
+            agents,
+            request.data["cmd"],
+            request.data["shell"],
+            request.data["timeout"],
+            request.user.username[:50],
+            run_on_offline=request.data["offlineAgents"],
         )
         return Response(f"Command will now be run on {len(agents)} agents")
     elif request.data["mode"] == "script":
-        script = get_object_or_404(Script, pk=request.data["scriptPK"])
+        script = get_object_or_404(Script, pk=request.data["script"])
         handle_bulk_script_task.delay(
-            script.pk, agents, request.data["args"], request.data["timeout"]
+            script.pk,
+            agents,
+            request.data["args"],
+            request.data["timeout"],
+            request.user.username[:50],
         )
         return Response(f"{script.name} will now be run on {len(agents)} agents")
-    elif request.data["mode"] == "install":
-        bulk_install_updates_task.delay(agents)
-        return Response(
-            f"Pending updates will now be installed on {len(agents)} agents"
-        )
-    elif request.data["mode"] == "scan":
-        bulk_check_for_updates_task.delay(agents)
-        return Response(f"Patch status scan will now run on {len(agents)} agents")
+    elif request.data["mode"] == "patch":
+        if request.data["patchMode"] == "install":
+            bulk_install_updates_task.delay(agents)
+            return Response(
+                f"Pending updates will now be installed on {len(agents)} agents"
+            )
+        elif request.data["patchMode"] == "scan":
+            bulk_check_for_updates_task.delay(agents)
+            return Response(f"Patch status scan will now run on {len(agents)} agents")

     return notify_error("Something went wrong")
@@ -746,3 +836,11 @@ class WMI(APIView):
if r != "ok": if r != "ok":
return notify_error("Unable to contact the agent") return notify_error("Unable to contact the agent")
return Response("ok") return Response("ok")
class AgentHistoryView(APIView):
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
history = AgentHistory.objects.filter(agent=agent)
ctx = {"default_tz": get_default_timezone()}
return Response(AgentHistorySerializer(history, many=True, context=ctx).data)

View File

@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-21 04:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0006_auto_20210217_1736'),
]
operations = [
migrations.AddField(
model_name='alerttemplate',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -0,0 +1,28 @@
# Generated by Django 3.2.1 on 2021-07-21 17:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0007_auto_20210721_0423'),
]
operations = [
migrations.AddField(
model_name='alerttemplate',
name='agent_script_actions',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='check_script_actions',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AddField(
model_name='alerttemplate',
name='task_script_actions',
field=models.BooleanField(blank=True, default=None, null=True),
),
]

View File

@@ -0,0 +1,28 @@
# Generated by Django 3.2.1 on 2021-07-21 18:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alerts', '0008_auto_20210721_1757'),
]
operations = [
migrations.AlterField(
model_name='alerttemplate',
name='agent_script_actions',
field=models.BooleanField(blank=True, default=True, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='check_script_actions',
field=models.BooleanField(blank=True, default=True, null=True),
),
migrations.AlterField(
model_name='alerttemplate',
name='task_script_actions',
field=models.BooleanField(blank=True, default=True, null=True),
),
]

View File

@@ -3,19 +3,18 @@ from __future__ import annotations
 import re
 from typing import TYPE_CHECKING, Union

-from django.conf import settings
 from django.contrib.postgres.fields import ArrayField
 from django.db import models
 from django.db.models.fields import BooleanField, PositiveIntegerField
 from django.utils import timezone as djangotime
-from loguru import logger
+from logs.models import BaseAuditModel, DebugLog

 if TYPE_CHECKING:
     from agents.models import Agent
     from autotasks.models import AutomatedTask
     from checks.models import Check

-logger.configure(**settings.LOG_CONFIG)

 SEVERITY_CHOICES = [
     ("info", "Informational"),
@@ -173,6 +172,7 @@ class Alert(models.Model):
             always_email = alert_template.agent_always_email
             always_text = alert_template.agent_always_text
             alert_interval = alert_template.agent_periodic_alert_days
+            run_script_action = alert_template.agent_script_actions

         if instance.should_create_alert(alert_template):
             alert = cls.create_or_return_availability_alert(instance)
@@ -209,6 +209,7 @@ class Alert(models.Model):
             always_email = alert_template.check_always_email
             always_text = alert_template.check_always_text
             alert_interval = alert_template.check_periodic_alert_days
+            run_script_action = alert_template.check_script_actions

         if instance.should_create_alert(alert_template):
             alert = cls.create_or_return_check_alert(instance)
@@ -242,6 +243,7 @@ class Alert(models.Model):
             always_email = alert_template.task_always_email
             always_text = alert_template.task_always_text
             alert_interval = alert_template.task_periodic_alert_days
+            run_script_action = alert_template.task_script_actions

         if instance.should_create_alert(alert_template):
             alert = cls.create_or_return_task_alert(instance)
@@ -295,7 +297,7 @@ class Alert(models.Model):
             text_task.delay(pk=alert.pk, alert_interval=alert_interval)

         # check if any scripts should be run
-        if alert_template and alert_template.action and not alert.action_run:
+        if alert_template and alert_template.action and run_script_action and not alert.action_run:  # type: ignore
             r = agent.run_script(
                 scriptpk=alert_template.action.pk,
                 args=alert.parse_script_args(alert_template.action_args),
@@ -314,8 +316,10 @@ class Alert(models.Model):
                 alert.action_run = djangotime.now()
                 alert.save()
             else:
-                logger.error(
-                    f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert"
+                DebugLog.error(
+                    agent=agent,
+                    log_type="scripting",
+                    message=f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) failure alert",
                 )

     @classmethod
@@ -345,6 +349,7 @@ class Alert(models.Model):
             if alert_template:
                 email_on_resolved = alert_template.agent_email_on_resolved
                 text_on_resolved = alert_template.agent_text_on_resolved
+                run_script_action = alert_template.agent_script_actions

         elif isinstance(instance, Check):
             from checks.tasks import (
@@ -363,6 +368,7 @@ class Alert(models.Model):
             if alert_template:
                 email_on_resolved = alert_template.check_email_on_resolved
                 text_on_resolved = alert_template.check_text_on_resolved
+                run_script_action = alert_template.check_script_actions

         elif isinstance(instance, AutomatedTask):
             from autotasks.tasks import (
@@ -381,6 +387,7 @@ class Alert(models.Model):
             if alert_template:
                 email_on_resolved = alert_template.task_email_on_resolved
                 text_on_resolved = alert_template.task_text_on_resolved
+                run_script_action = alert_template.task_script_actions

         else:
             return
@@ -403,6 +410,7 @@ class Alert(models.Model):
         if (
             alert_template
             and alert_template.resolved_action
+            and run_script_action  # type: ignore
             and not alert.resolved_action_run
         ):
             r = agent.run_script(
@@ -425,8 +433,10 @@ class Alert(models.Model):
                 alert.resolved_action_run = djangotime.now()
                 alert.save()
             else:
-                logger.error(
-                    f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
+                DebugLog.error(
+                    agent=agent,
+                    log_type="scripting",
+                    message=f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname}({agent.pk}) resolved alert",
                 )

     def parse_script_args(self, args: list[str]):
@@ -451,7 +461,7 @@ class Alert(models.Model):
             try:
                 temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))  # type: ignore
             except Exception as e:
-                logger.error(e)
+                DebugLog.error(log_type="scripting", message=e)
                 continue

             else:
@@ -460,7 +470,7 @@ class Alert(models.Model):
         return temp_args


-class AlertTemplate(models.Model):
+class AlertTemplate(BaseAuditModel):
     name = models.CharField(max_length=100)
     is_active = models.BooleanField(default=True)
@@ -517,6 +527,7 @@ class AlertTemplate(models.Model):
     agent_always_text = BooleanField(null=True, blank=True, default=None)
     agent_always_alert = BooleanField(null=True, blank=True, default=None)
     agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
+    agent_script_actions = BooleanField(null=True, blank=True, default=True)

     # check alert settings
     check_email_alert_severity = ArrayField(
@@ -540,6 +551,7 @@ class AlertTemplate(models.Model):
     check_always_text = BooleanField(null=True, blank=True, default=None)
     check_always_alert = BooleanField(null=True, blank=True, default=None)
     check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
+    check_script_actions = BooleanField(null=True, blank=True, default=True)

     # task alert settings
     task_email_alert_severity = ArrayField(
@@ -563,6 +575,7 @@ class AlertTemplate(models.Model):
     task_always_text = BooleanField(null=True, blank=True, default=None)
     task_always_alert = BooleanField(null=True, blank=True, default=None)
     task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
+    task_script_actions = BooleanField(null=True, blank=True, default=True)

     # exclusion settings
     exclude_workstations = BooleanField(null=True, blank=True, default=False)
@@ -581,6 +594,13 @@ class AlertTemplate(models.Model):
     def __str__(self):
         return self.name

+    @staticmethod
+    def serialize(alert_template):
+        # serializes the agent and returns json
+        from .serializers import AlertTemplateAuditSerializer
+
+        return AlertTemplateAuditSerializer(alert_template).data
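A small sketch of what the new hook returns (not from the diff; assumes at least one AlertTemplate exists):

from alerts.models import AlertTemplate

template = AlertTemplate.objects.first()
snapshot = AlertTemplate.serialize(template)  # plain dict of every model field
print(snapshot["name"], snapshot["agent_script_actions"])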
     @property
     def has_agent_settings(self) -> bool:
         return (

View File

@@ -119,3 +119,9 @@ class AlertTemplateRelationSerializer(ModelSerializer):
     class Meta:
         model = AlertTemplate
         fields = "__all__"


+class AlertTemplateAuditSerializer(ModelSerializer):
+    class Meta:
+        model = AlertTemplate
+        fields = "__all__"

View File

@@ -1,11 +1,10 @@
 from django.utils import timezone as djangotime

+from alerts.models import Alert
 from tacticalrmm.celery import app


 @app.task
 def unsnooze_alerts() -> str:
-    from .models import Alert

     Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
         snoozed=False, snooze_until=None
@@ -22,3 +21,14 @@ def cache_agents_alert_template():
         agent.set_alert_template()

     return "ok"


+@app.task
+def prune_resolved_alerts(older_than_days: int) -> str:
+    from .models import Alert
+
+    Alert.objects.filter(resolved=True).filter(
+        alert_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
+    ).delete()
+
+    return "ok"

View File

@@ -1,14 +1,13 @@
 from datetime import datetime, timedelta
 from unittest.mock import patch

-from core.models import CoreSettings
 from django.conf import settings
 from django.utils import timezone as djangotime
 from model_bakery import baker, seq
-from tacticalrmm.test import TacticalTestCase

 from alerts.tasks import cache_agents_alert_template
-from autotasks.models import AutomatedTask
+from core.models import CoreSettings
+from tacticalrmm.test import TacticalTestCase

 from .models import Alert, AlertTemplate
 from .serializers import (
@@ -330,8 +329,8 @@ class TestAlertsViews(TacticalTestCase):
baker.make("clients.Site", alert_template=alert_template, _quantity=3) baker.make("clients.Site", alert_template=alert_template, _quantity=3)
baker.make("automation.Policy", alert_template=alert_template) baker.make("automation.Policy", alert_template=alert_template)
core = CoreSettings.objects.first() core = CoreSettings.objects.first()
core.alert_template = alert_template core.alert_template = alert_template # type: ignore
core.save() core.save() # type: ignore
url = f"/alerts/alerttemplates/{alert_template.pk}/related/" # type: ignore url = f"/alerts/alerttemplates/{alert_template.pk}/related/" # type: ignore
@@ -403,16 +402,16 @@ class TestAlertTasks(TacticalTestCase):
         # assign first Alert Template as to a policy and apply it as default
         policy.alert_template = alert_templates[0]  # type: ignore
         policy.save()  # type: ignore
-        core.workstation_policy = policy
-        core.server_policy = policy
-        core.save()
+        core.workstation_policy = policy  # type: ignore
+        core.server_policy = policy  # type: ignore
+        core.save()  # type: ignore

         self.assertEquals(server.set_alert_template().pk, alert_templates[0].pk)  # type: ignore
         self.assertEquals(workstation.set_alert_template().pk, alert_templates[0].pk)  # type: ignore

         # assign second Alert Template to as default alert template
         core.alert_template = alert_templates[1]  # type: ignore
-        core.save()
+        core.save()  # type: ignore

         self.assertEquals(workstation.set_alert_template().pk, alert_templates[1].pk)  # type: ignore
         self.assertEquals(server.set_alert_template().pk, alert_templates[1].pk)  # type: ignore
@@ -514,6 +513,7 @@ class TestAlertTasks(TacticalTestCase):
            agent_recovery_email_task,
            agent_recovery_sms_task,
        )

        from alerts.models import Alert

        agent_dashboard_alert = baker.make_recipe("agents.overdue_agent")
@@ -727,7 +727,6 @@ class TestAlertTasks(TacticalTestCase):
         send_email,
         sleep,
     ):
-        from alerts.tasks import cache_agents_alert_template
         from checks.models import Check
         from checks.tasks import (
             handle_check_email_alert_task,
@@ -736,6 +735,8 @@ class TestAlertTasks(TacticalTestCase):
             handle_resolved_check_sms_alert_task,
         )

+        from alerts.tasks import cache_agents_alert_template

         # create test data
         agent = baker.make_recipe("agents.agent")
         agent_no_settings = baker.make_recipe("agents.agent")
@@ -1011,7 +1012,6 @@ class TestAlertTasks(TacticalTestCase):
         send_email,
         sleep,
     ):
-        from alerts.tasks import cache_agents_alert_template
         from autotasks.models import AutomatedTask
         from autotasks.tasks import (
             handle_resolved_task_email_alert,
@@ -1020,6 +1020,8 @@ class TestAlertTasks(TacticalTestCase):
             handle_task_sms_alert,
         )

+        from alerts.tasks import cache_agents_alert_template

         # create test data
         agent = baker.make_recipe("agents.agent")
         agent_no_settings = baker.make_recipe("agents.agent")
@@ -1272,17 +1274,17 @@ class TestAlertTasks(TacticalTestCase):
         )

         core = CoreSettings.objects.first()
-        core.smtp_host = "test.test.com"
-        core.smtp_port = 587
-        core.smtp_recipients = ["recipient@test.com"]
-        core.twilio_account_sid = "test"
-        core.twilio_auth_token = "1234123412341234"
-        core.sms_alert_recipients = ["+1234567890"]
+        core.smtp_host = "test.test.com"  # type: ignore
+        core.smtp_port = 587  # type: ignore
+        core.smtp_recipients = ["recipient@test.com"]  # type: ignore
+        core.twilio_account_sid = "test"  # type: ignore
+        core.twilio_auth_token = "1234123412341234"  # type: ignore
+        core.sms_alert_recipients = ["+1234567890"]  # type: ignore

         # test sending email with alert template settings
-        core.send_mail("Test", "Test", alert_template=alert_template)
-        core.send_sms("Test", alert_template=alert_template)
+        core.send_mail("Test", "Test", alert_template=alert_template)  # type: ignore
+        core.send_sms("Test", alert_template=alert_template)  # type: ignore

     @patch("agents.models.Agent.nats_cmd")
     @patch("agents.tasks.agent_outage_sms_task.delay")
@@ -1315,6 +1317,7 @@ class TestAlertTasks(TacticalTestCase):
"alerts.AlertTemplate", "alerts.AlertTemplate",
is_active=True, is_active=True,
agent_always_alert=True, agent_always_alert=True,
agent_script_actions=False,
action=failure_action, action=failure_action,
action_timeout=30, action_timeout=30,
resolved_action=resolved_action, resolved_action=resolved_action,
@@ -1328,6 +1331,14 @@ class TestAlertTasks(TacticalTestCase):
         agent_outages_task()

+        # should not have been called since agent_script_actions is set to False
+        nats_cmd.assert_not_called()
+
+        alert_template.agent_script_actions = True  # type: ignore
+        alert_template.save()  # type: ignore
+
+        agent_outages_task()

         # this is what data should be
         data = {
             "func": "runscriptfull",
@@ -1340,14 +1351,6 @@ class TestAlertTasks(TacticalTestCase):
         nats_cmd.reset_mock()

-        # Setup cmd mock
-        success = {
-            "retcode": 0,
-            "stdout": "success!",
-            "stderr": "",
-            "execution_time": 5.0000,
-        }
-
         nats_cmd.side_effect = ["pong", success]

         # make sure script run results were stored
@@ -1398,3 +1401,36 @@ class TestAlertTasks(TacticalTestCase):
["-Parameter", f"-Another '{alert.id}'"], # type: ignore ["-Parameter", f"-Another '{alert.id}'"], # type: ignore
alert.parse_script_args(args=args), # type: ignore alert.parse_script_args(args=args), # type: ignore
) )
def test_prune_resolved_alerts(self):
from .tasks import prune_resolved_alerts
# setup data
resolved_alerts = baker.make(
"alerts.Alert",
resolved=True,
_quantity=25,
)
alerts = baker.make(
"alerts.Alert",
resolved=False,
_quantity=25,
)
days = 0
for alert in resolved_alerts: # type: ignore
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
alert.save()
days = days + 5
days = 0
for alert in alerts: # type: ignore
alert.alert_time = djangotime.now() - djangotime.timedelta(days=days)
alert.save()
days = days + 5
# delete AgentHistory older than 30 days
prune_resolved_alerts(30)
self.assertEqual(Alert.objects.count(), 31)

View File

@@ -20,4 +20,5 @@ urlpatterns = [
path("superseded/", views.SupersededWinUpdate.as_view()), path("superseded/", views.SupersededWinUpdate.as_view()),
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()), path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
path("<str:agentid>/recovery/", views.AgentRecovery.as_view()), path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
path("<int:pk>/<str:agentid>/histresult/", views.AgentHistoryResult.as_view()),
] ]

View File

@@ -6,7 +6,6 @@ from django.conf import settings
 from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
 from django.utils import timezone as djangotime
-from loguru import logger
 from packaging import version as pyver
 from rest_framework.authentication import TokenAuthentication
 from rest_framework.authtoken.models import Token
@@ -15,20 +14,18 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from accounts.models import User
-from agents.models import Agent, AgentCustomField
-from agents.serializers import WinAgentSerializer
+from agents.models import Agent, AgentHistory
+from agents.serializers import WinAgentSerializer, AgentHistorySerializer
 from autotasks.models import AutomatedTask
 from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
 from checks.models import Check
 from checks.serializers import CheckRunnerGetSerializer
 from checks.utils import bytes2human
-from logs.models import PendingAction
+from logs.models import PendingAction, DebugLog
 from software.models import InstalledSoftware
 from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
 from winupdate.models import WinUpdate, WinUpdatePolicy

-logger.configure(**settings.LOG_CONFIG)


 class CheckIn(APIView):
@@ -36,6 +33,10 @@ class CheckIn(APIView):
permission_classes = [IsAuthenticated] permission_classes = [IsAuthenticated]
def patch(self, request): def patch(self, request):
"""
!!! DEPRECATED AS OF AGENT 1.6.0 !!!
Endpoint will be removed in a future release
"""
from alerts.models import Alert from alerts.models import Alert
updated = False updated = False
@@ -182,7 +183,11 @@ class WinUpdates(APIView):
if reboot: if reboot:
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False)) asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
logger.info(f"{agent.hostname} is rebooting after updates were installed.") DebugLog.info(
agent=agent,
log_type="windows_updates",
message=f"{agent.hostname} is rebooting after updates were installed.",
)
agent.delete_superseded_updates() agent.delete_superseded_updates()
return Response("ok") return Response("ok")
@@ -350,13 +355,12 @@ class TaskRunner(APIView):
permission_classes = [IsAuthenticated] permission_classes = [IsAuthenticated]
def get(self, request, pk, agentid): def get(self, request, pk, agentid):
agent = get_object_or_404(Agent, agent_id=agentid) _ = get_object_or_404(Agent, agent_id=agentid)
task = get_object_or_404(AutomatedTask, pk=pk) task = get_object_or_404(AutomatedTask, pk=pk)
return Response(TaskGOGetSerializer(task).data) return Response(TaskGOGetSerializer(task).data)
def patch(self, request, pk, agentid): def patch(self, request, pk, agentid):
from alerts.models import Alert from alerts.models import Alert
from logs.models import AuditLog
agent = get_object_or_404(Agent, agent_id=agentid) agent = get_object_or_404(Agent, agent_id=agentid)
task = get_object_or_404(AutomatedTask, pk=pk) task = get_object_or_404(AutomatedTask, pk=pk)
@@ -371,38 +375,7 @@ class TaskRunner(APIView):
if task.custom_field: if task.custom_field:
if not task.stderr: if not task.stderr:
if AgentCustomField.objects.filter( task.save_collector_results()
field=task.custom_field, agent=task.agent
).exists():
agent_field = AgentCustomField.objects.get(
field=task.custom_field, agent=task.agent
)
else:
agent_field = AgentCustomField.objects.create(
field=task.custom_field, agent=task.agent
)
# get last line of stdout
value = (
new_task.stdout
if task.collector_all_output
else new_task.stdout.split("\n")[-1].strip()
)
if task.custom_field.type in [
"text",
"number",
"single",
"datetime",
]:
agent_field.string_value = value
agent_field.save()
elif task.custom_field.type == "multiple":
agent_field.multiple_value = value.split(",")
agent_field.save()
elif task.custom_field.type == "checkbox":
agent_field.bool_value = bool(value)
agent_field.save()
status = "passing" status = "passing"
else: else:
@@ -419,15 +392,6 @@ class TaskRunner(APIView):
else: else:
Alert.handle_alert_failure(new_task) Alert.handle_alert_failure(new_task)
AuditLog.objects.create(
username=agent.hostname,
agent=agent.hostname,
object_type="agent",
action="task_run",
message=f"Scheduled Task {task.name} was run on {agent.hostname}",
after_value=AutomatedTask.serialize(new_task),
)
return Response("ok") return Response("ok")
@@ -518,6 +482,7 @@ class NewAgent(APIView):
action="agent_install", action="agent_install",
message=f"{request.user} installed new agent {agent.hostname}", message=f"{request.user} installed new agent {agent.hostname}",
after_value=Agent.serialize(agent), after_value=Agent.serialize(agent),
debug_info={"ip": request._client_ip},
) )
return Response( return Response(
@@ -622,3 +587,16 @@ class AgentRecovery(APIView):
reload_nats() reload_nats()
return Response(ret) return Response(ret)
class AgentHistoryResult(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
def patch(self, request, agentid, pk):
_ = get_object_or_404(Agent, agent_id=agentid)
hist = get_object_or_404(AgentHistory, pk=pk)
s = AgentHistorySerializer(instance=hist, data=request.data, partial=True)
s.is_valid(raise_exception=True)
s.save()
return Response("ok")

View File

@@ -33,7 +33,7 @@ class Policy(BaseAuditModel):
# get old policy if exists # get old policy if exists
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
super(BaseAuditModel, self).save(*args, **kwargs) super(Policy, self).save(old_model=old_policy, *args, **kwargs)
# generate agent checks only if active and enforced were changed # generate agent checks only if active and enforced were changed
if old_policy: if old_policy:
@@ -50,7 +50,7 @@ class Policy(BaseAuditModel):
from automation.tasks import generate_agent_checks_task from automation.tasks import generate_agent_checks_task
agents = list(self.related_agents().only("pk").values_list("pk", flat=True)) agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
super(BaseAuditModel, self).delete(*args, **kwargs) super(Policy, self).delete(*args, **kwargs)
generate_agent_checks_task.delay(agents=agents, create_tasks=True) generate_agent_checks_task.delay(agents=agents, create_tasks=True)
@@ -126,9 +126,9 @@ class Policy(BaseAuditModel):
@staticmethod @staticmethod
def serialize(policy): def serialize(policy):
# serializes the policy and returns json # serializes the policy and returns json
from .serializers import PolicySerializer from .serializers import PolicyAuditSerializer
return PolicySerializer(policy).data return PolicyAuditSerializer(policy).data
@staticmethod @staticmethod
def cascade_policy_tasks(agent): def cascade_policy_tasks(agent):

View File

@@ -89,3 +89,9 @@ class AutoTasksFieldSerializer(ModelSerializer):
model = AutomatedTask model = AutomatedTask
fields = "__all__" fields = "__all__"
depth = 1 depth = 1
class PolicyAuditSerializer(ModelSerializer):
class Meta:
model = Policy
fields = "__all__"

View File

@@ -918,11 +918,13 @@ class TestPolicyTasks(TacticalTestCase):
@patch("autotasks.models.AutomatedTask.create_task_on_agent") @patch("autotasks.models.AutomatedTask.create_task_on_agent")
@patch("autotasks.models.AutomatedTask.delete_task_on_agent") @patch("autotasks.models.AutomatedTask.delete_task_on_agent")
def test_delete_policy_tasks(self, delete_task_on_agent, create_task): def test_delete_policy_tasks(self, delete_task_on_agent, create_task):
from .tasks import delete_policy_autotasks_task from .tasks import delete_policy_autotasks_task, generate_agent_checks_task
policy = baker.make("automation.Policy", active=True) policy = baker.make("automation.Policy", active=True)
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
baker.make_recipe("agents.server_agent", policy=policy) agent = baker.make_recipe("agents.server_agent", policy=policy)
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
delete_policy_autotasks_task(task=tasks[0].id) # type: ignore delete_policy_autotasks_task(task=tasks[0].id) # type: ignore
@@ -931,11 +933,13 @@ class TestPolicyTasks(TacticalTestCase):
@patch("autotasks.models.AutomatedTask.create_task_on_agent") @patch("autotasks.models.AutomatedTask.create_task_on_agent")
@patch("autotasks.models.AutomatedTask.run_win_task") @patch("autotasks.models.AutomatedTask.run_win_task")
def test_run_policy_task(self, run_win_task, create_task): def test_run_policy_task(self, run_win_task, create_task):
from .tasks import run_win_policy_autotasks_task from .tasks import run_win_policy_autotasks_task, generate_agent_checks_task
policy = baker.make("automation.Policy", active=True) policy = baker.make("automation.Policy", active=True)
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
baker.make_recipe("agents.server_agent", policy=policy) agent = baker.make_recipe("agents.server_agent", policy=policy)
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
run_win_policy_autotasks_task(task=tasks[0].id) # type: ignore run_win_policy_autotasks_task(task=tasks[0].id) # type: ignore
@@ -944,7 +948,10 @@ class TestPolicyTasks(TacticalTestCase):
@patch("autotasks.models.AutomatedTask.create_task_on_agent") @patch("autotasks.models.AutomatedTask.create_task_on_agent")
@patch("autotasks.models.AutomatedTask.modify_task_on_agent") @patch("autotasks.models.AutomatedTask.modify_task_on_agent")
def test_update_policy_tasks(self, modify_task_on_agent, create_task): def test_update_policy_tasks(self, modify_task_on_agent, create_task):
from .tasks import update_policy_autotasks_fields_task from .tasks import (
update_policy_autotasks_fields_task,
generate_agent_checks_task,
)
# setup data # setup data
policy = baker.make("automation.Policy", active=True) policy = baker.make("automation.Policy", active=True)
@@ -956,6 +963,8 @@ class TestPolicyTasks(TacticalTestCase):
) )
agent = baker.make_recipe("agents.server_agent", policy=policy) agent = baker.make_recipe("agents.server_agent", policy=policy)
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
tasks[0].enabled = False # type: ignore tasks[0].enabled = False # type: ignore
tasks[0].save() # type: ignore tasks[0].save() # type: ignore
@@ -995,6 +1004,8 @@ class TestPolicyTasks(TacticalTestCase):
@patch("autotasks.models.AutomatedTask.create_task_on_agent") @patch("autotasks.models.AutomatedTask.create_task_on_agent")
def test_policy_exclusions(self, create_task): def test_policy_exclusions(self, create_task):
from .tasks import generate_agent_checks_task
# setup data # setup data
policy = baker.make("automation.Policy", active=True) policy = baker.make("automation.Policy", active=True)
baker.make_recipe("checks.memory_check", policy=policy) baker.make_recipe("checks.memory_check", policy=policy)
@@ -1003,6 +1014,8 @@ class TestPolicyTasks(TacticalTestCase):
"agents.agent", policy=policy, monitoring_type="server" "agents.agent", policy=policy, monitoring_type="server"
) )
generate_agent_checks_task(agents=[agent.pk], create_tasks=True)
# make sure related agents on policy returns correctly # make sure related agents on policy returns correctly
self.assertEqual(policy.related_agents().count(), 1) # type: ignore self.assertEqual(policy.related_agents().count(), 1) # type: ignore
self.assertEqual(agent.agentchecks.count(), 1) # type: ignore self.assertEqual(agent.agentchecks.count(), 1) # type: ignore

View File

@@ -6,19 +6,15 @@ from typing import List
import pytz import pytz
from alerts.models import SEVERITY_CHOICES from alerts.models import SEVERITY_CHOICES
from django.conf import settings
from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields import ArrayField
from django.db import models from django.db import models
from django.db.models.fields import DateTimeField from django.db.models.fields import DateTimeField
from django.db.utils import DatabaseError from django.db.utils import DatabaseError
from django.utils import timezone as djangotime from django.utils import timezone as djangotime
from logs.models import BaseAuditModel from logs.models import BaseAuditModel, DebugLog
from loguru import logger
from packaging import version as pyver from packaging import version as pyver
from tacticalrmm.utils import bitdays_to_string from tacticalrmm.utils import bitdays_to_string
logger.configure(**settings.LOG_CONFIG)
RUN_TIME_DAY_CHOICES = [ RUN_TIME_DAY_CHOICES = [
(0, "Monday"), (0, "Monday"),
(1, "Tuesday"), (1, "Tuesday"),
@@ -195,12 +191,20 @@ class AutomatedTask(BaseAuditModel):
@staticmethod @staticmethod
def serialize(task): def serialize(task):
# serializes the task and returns json # serializes the task and returns json
from .serializers import TaskSerializer from .serializers import TaskAuditSerializer
return TaskSerializer(task).data return TaskAuditSerializer(task).data
def create_policy_task(self, agent=None, policy=None, assigned_check=None): def create_policy_task(self, agent=None, policy=None, assigned_check=None):
# only assign the check to a new policy task when the corresponding agent check already exists
if (
self.assigned_check
and agent
and agent.agentchecks.filter(parent_check=self.assigned_check.id).exists()
):
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.id)
# if policy is present, then this task is being copied to another policy # if policy is present, then this task is being copied to another policy
# if agent is present, then this task is being created on an agent from a policy # if agent is present, then this task is being created on an agent from a policy
# exit if neither are set or if both are set # exit if neither are set or if both are set
@@ -254,7 +258,7 @@ class AutomatedTask(BaseAuditModel):
elif self.task_type == "runonce": elif self.task_type == "runonce":
# check if scheduled time is in the past # check if scheduled time is in the past
agent_tz = pytz.timezone(agent.timezone) agent_tz = pytz.timezone(agent.timezone) # type: ignore
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone( task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
pytz.utc pytz.utc
) )
@@ -280,7 +284,7 @@ class AutomatedTask(BaseAuditModel):
}, },
} }
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse( if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse( # type: ignore
"1.4.7" "1.4.7"
): ):
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
@@ -301,19 +305,25 @@ class AutomatedTask(BaseAuditModel):
else: else:
return "error" return "error"
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
if r != "ok": if r != "ok":
self.sync_status = "initial" self.sync_status = "initial"
self.save(update_fields=["sync_status"]) self.save(update_fields=["sync_status"])
logger.warning( DebugLog.warning(
f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in." agent=agent,
log_type="agent_issues",
message=f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in.", # type: ignore
) )
return "timeout" return "timeout"
else: else:
self.sync_status = "synced" self.sync_status = "synced"
self.save(update_fields=["sync_status"]) self.save(update_fields=["sync_status"])
logger.info(f"{agent.hostname} task {self.name} was successfully created") DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname} task {self.name} was successfully created", # type: ignore
)
return "ok" return "ok"
@@ -333,19 +343,25 @@ class AutomatedTask(BaseAuditModel):
"enabled": self.enabled, "enabled": self.enabled,
}, },
} }
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) r = asyncio.run(agent.nats_cmd(nats_data, timeout=5)) # type: ignore
if r != "ok": if r != "ok":
self.sync_status = "notsynced" self.sync_status = "notsynced"
self.save(update_fields=["sync_status"]) self.save(update_fields=["sync_status"])
logger.warning( DebugLog.warning(
f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin" agent=agent,
log_type="agent_issues",
message=f"Unable to modify scheduled task {self.name} on {agent.hostname}({agent.pk}). It will try again on next agent checkin", # type: ignore
) )
return "timeout" return "timeout"
else: else:
self.sync_status = "synced" self.sync_status = "synced"
self.save(update_fields=["sync_status"]) self.save(update_fields=["sync_status"])
logger.info(f"{agent.hostname} task {self.name} was successfully modified") DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname} task {self.name} was successfully modified", # type: ignore
)
return "ok" return "ok"
@@ -362,7 +378,7 @@ class AutomatedTask(BaseAuditModel):
"func": "delschedtask", "func": "delschedtask",
"schedtaskpayload": {"name": self.win_task_name}, "schedtaskpayload": {"name": self.win_task_name},
} }
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) r = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) # type: ignore
if r != "ok" and "The system cannot find the file specified" not in r: if r != "ok" and "The system cannot find the file specified" not in r:
self.sync_status = "pendingdeletion" self.sync_status = "pendingdeletion"
@@ -372,13 +388,19 @@ class AutomatedTask(BaseAuditModel):
except DatabaseError: except DatabaseError:
pass pass
logger.warning( DebugLog.warning(
f"{agent.hostname} task {self.name} will be deleted on next checkin" agent=agent,
log_type="agent_issues",
message=f"{agent.hostname} task {self.name} will be deleted on next checkin", # type: ignore
) )
return "timeout" return "timeout"
else: else:
self.delete() self.delete()
logger.info(f"{agent.hostname} task {self.name} was deleted") DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"{agent.hostname}({agent.pk}) task {self.name} was deleted", # type: ignore
)
return "ok" return "ok"
@@ -391,9 +413,20 @@ class AutomatedTask(BaseAuditModel):
.first() .first()
) )
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False)) asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False)) # type: ignore
return "ok" return "ok"
def save_collector_results(self):
agent_field = self.custom_field.get_or_create_field_value(self.agent)
value = (
self.stdout.strip()
if self.collector_all_output
else self.stdout.strip().split("\n")[-1].strip()
)
agent_field.save_to_field(value)
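The new save_collector_results keeps only the last line of stdout unless collector_all_output is set, then hands the value to the custom-field helpers added later in this diff; a tiny self-contained sketch of that selection rule (the helper name is hypothetical):

# mirrors the value-selection logic in save_collector_results above
def collector_value(stdout: str, collect_all: bool) -> str:
    out = stdout.strip()
    return out if collect_all else out.split("\n")[-1].strip()


assert collector_value("line one\nfinal value", collect_all=False) == "final value"
assert collector_value("line one\nfinal value", collect_all=True) == "line one\nfinal value"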
def should_create_alert(self, alert_template=None): def should_create_alert(self, alert_template=None):
return ( return (
self.dashboard_alert self.dashboard_alert
@@ -424,7 +457,7 @@ class AutomatedTask(BaseAuditModel):
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
) )
CORE.send_mail(subject, body, self.agent.alert_template) CORE.send_mail(subject, body, self.agent.alert_template) # type: ignore
def send_sms(self): def send_sms(self):
from core.models import CoreSettings from core.models import CoreSettings
@@ -441,7 +474,7 @@ class AutomatedTask(BaseAuditModel):
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
) )
CORE.send_sms(body, alert_template=self.agent.alert_template) CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore
def send_resolved_email(self): def send_resolved_email(self):
from core.models import CoreSettings from core.models import CoreSettings
@@ -453,7 +486,7 @@ class AutomatedTask(BaseAuditModel):
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
) )
CORE.send_mail(subject, body, alert_template=self.agent.alert_template) CORE.send_mail(subject, body, alert_template=self.agent.alert_template) # type: ignore
def send_resolved_sms(self): def send_resolved_sms(self):
from core.models import CoreSettings from core.models import CoreSettings
@@ -464,4 +497,4 @@ class AutomatedTask(BaseAuditModel):
subject subject
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
) )
CORE.send_sms(body, alert_template=self.agent.alert_template) CORE.send_sms(body, alert_template=self.agent.alert_template) # type: ignore

View File

@@ -84,3 +84,9 @@ class TaskRunnerPatchSerializer(serializers.ModelSerializer):
class Meta: class Meta:
model = AutomatedTask model = AutomatedTask
fields = "__all__" fields = "__all__"
class TaskAuditSerializer(serializers.ModelSerializer):
class Meta:
model = AutomatedTask
fields = "__all__"

View File

@@ -1,18 +1,16 @@
import asyncio import asyncio
import datetime as dt import datetime as dt
from logging import log
import random import random
from time import sleep from time import sleep
from typing import Union from typing import Union
from django.conf import settings
from django.utils import timezone as djangotime from django.utils import timezone as djangotime
from loguru import logger
from autotasks.models import AutomatedTask from autotasks.models import AutomatedTask
from logs.models import DebugLog
from tacticalrmm.celery import app from tacticalrmm.celery import app
logger.configure(**settings.LOG_CONFIG)
@app.task @app.task
def create_win_task_schedule(pk): def create_win_task_schedule(pk):
@@ -53,12 +51,20 @@ def remove_orphaned_win_tasks(agentpk):
agent = Agent.objects.get(pk=agentpk) agent = Agent.objects.get(pk=agentpk)
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.") DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"Orphaned task cleanup initiated on {agent.hostname}.",
)
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10)) r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
if not isinstance(r, list) and not r: # empty list if not isinstance(r, list) and not r: # empty list
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}") DebugLog.error(
agent=agent,
log_type="agent_issues",
message=f"Unable to clean up scheduled tasks on {agent.hostname}: {r}",
)
return "notlist" return "notlist"
agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True)) agent_task_names = list(agent.autotasks.values_list("win_task_name", flat=True))
@@ -83,13 +89,23 @@ def remove_orphaned_win_tasks(agentpk):
} }
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10)) ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if ret != "ok": if ret != "ok":
logger.error( DebugLog.error(
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}" agent=agent,
log_type="agent_issues",
message=f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}",
) )
else: else:
logger.info(f"Removed orphaned task {task} from {agent.hostname}") DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"Removed orphaned task {task} from {agent.hostname}",
)
logger.info(f"Orphaned task cleanup finished on {agent.hostname}") DebugLog.info(
agent=agent,
log_type="agent_issues",
message=f"Orphaned task cleanup finished on {agent.hostname}",
)
@app.task @app.task

View File

@@ -12,10 +12,6 @@ from django.contrib.postgres.fields import ArrayField
from django.core.validators import MaxValueValidator, MinValueValidator from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models from django.db import models
from logs.models import BaseAuditModel from logs.models import BaseAuditModel
from loguru import logger
logger.configure(**settings.LOG_CONFIG)
CHECK_TYPE_CHOICES = [ CHECK_TYPE_CHOICES = [
("diskspace", "Disk Space Check"), ("diskspace", "Disk Space Check"),
@@ -475,9 +471,9 @@ class Check(BaseAuditModel):
@staticmethod @staticmethod
def serialize(check): def serialize(check):
# serializes the check and returns json # serializes the check and returns json
from .serializers import CheckSerializer from .serializers import CheckAuditSerializer
return CheckSerializer(check).data return CheckAuditSerializer(check).data
# for policy diskchecks # for policy diskchecks
@staticmethod @staticmethod

View File

@@ -220,3 +220,9 @@ class CheckHistorySerializer(serializers.ModelSerializer):
class Meta: class Meta:
model = CheckHistory model = CheckHistory
fields = ("x", "y", "results") fields = ("x", "y", "results")
class CheckAuditSerializer(serializers.ModelSerializer):
class Meta:
model = Check
fields = "__all__"

View File

@@ -33,13 +33,17 @@ class Client(BaseAuditModel):
blank=True, blank=True,
) )
def save(self, *args, **kw): def save(self, *args, **kwargs):
from alerts.tasks import cache_agents_alert_template from alerts.tasks import cache_agents_alert_template
from automation.tasks import generate_agent_checks_task from automation.tasks import generate_agent_checks_task
# get old client if exists # get old client if exists
old_client = type(self).objects.get(pk=self.pk) if self.pk else None old_client = Client.objects.get(pk=self.pk) if self.pk else None
super(BaseAuditModel, self).save(*args, **kw) super(Client, self).save(
old_model=old_client,
*args,
**kwargs,
)
# check if policies have changed and initiate a task to reapply policies if so
if old_client: if old_client:
@@ -50,7 +54,6 @@ class Client(BaseAuditModel):
old_client.block_policy_inheritance != self.block_policy_inheritance old_client.block_policy_inheritance != self.block_policy_inheritance
) )
): ):
generate_agent_checks_task.delay( generate_agent_checks_task.delay(
client=self.pk, client=self.pk,
create_tasks=True, create_tasks=True,
@@ -120,10 +123,10 @@ class Client(BaseAuditModel):
@staticmethod @staticmethod
def serialize(client): def serialize(client):
# serializes the client and returns json from .serializers import ClientAuditSerializer
from .serializers import ClientSerializer
return ClientSerializer(client).data # serializes the client and returns json
return ClientAuditSerializer(client).data
class Site(BaseAuditModel): class Site(BaseAuditModel):
@@ -153,13 +156,17 @@ class Site(BaseAuditModel):
blank=True, blank=True,
) )
def save(self, *args, **kw): def save(self, *args, **kwargs):
from alerts.tasks import cache_agents_alert_template from alerts.tasks import cache_agents_alert_template
from automation.tasks import generate_agent_checks_task from automation.tasks import generate_agent_checks_task
# get old client if exists # get old client if exists
old_site = type(self).objects.get(pk=self.pk) if self.pk else None old_site = Site.objects.get(pk=self.pk) if self.pk else None
super(Site, self).save(*args, **kw) super(Site, self).save(
old_model=old_site,
*args,
**kwargs,
)
# check if policies have changed and initiate a task to reapply policies if so
if old_site: if old_site:
@@ -168,11 +175,10 @@ class Site(BaseAuditModel):
or (old_site.workstation_policy != self.workstation_policy) or (old_site.workstation_policy != self.workstation_policy)
or (old_site.block_policy_inheritance != self.block_policy_inheritance) or (old_site.block_policy_inheritance != self.block_policy_inheritance)
): ):
generate_agent_checks_task.delay(site=self.pk, create_tasks=True) generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
if old_site.alert_template != self.alert_template: if old_site.alert_template != self.alert_template:
cache_agents_alert_template.delay() cache_agents_alert_template.delay()
class Meta: class Meta:
ordering = ("name",) ordering = ("name",)
@@ -233,10 +239,10 @@ class Site(BaseAuditModel):
@staticmethod @staticmethod
def serialize(site): def serialize(site):
# serializes the site and returns json from .serializers import SiteAuditSerializer
from .serializers import SiteSerializer
return SiteSerializer(site).data # serializes the site and returns json
return SiteAuditSerializer(site).data
MON_TYPE_CHOICES = [ MON_TYPE_CHOICES = [
@@ -308,6 +314,22 @@ class ClientCustomField(models.Model):
else: else:
return self.string_value return self.string_value
def save_to_field(self, value):
if self.field.type in [
"text",
"number",
"single",
"datetime",
]:
self.string_value = value
self.save()
elif type == "multiple":
self.multiple_value = value.split(",")
self.save()
elif type == "checkbox":
self.bool_value = bool(value)
self.save()
class SiteCustomField(models.Model): class SiteCustomField(models.Model):
site = models.ForeignKey( site = models.ForeignKey(
@@ -342,3 +364,19 @@ class SiteCustomField(models.Model):
return self.bool_value return self.bool_value
else: else:
return self.string_value return self.string_value
def save_to_field(self, value):
if self.field.type in [
"text",
"number",
"single",
"datetime",
]:
self.string_value = value
self.save()
elif type == "multiple":
self.multiple_value = value.split(",")
self.save()
elif type == "checkbox":
self.bool_value = bool(value)
self.save()

View File

@@ -1,4 +1,10 @@
from rest_framework.serializers import ModelSerializer, ReadOnlyField, ValidationError from django.db.models.base import Model
from rest_framework.serializers import (
ModelSerializer,
ReadOnlyField,
Serializer,
ValidationError,
)
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
@@ -134,3 +140,15 @@ class DeploymentSerializer(ModelSerializer):
"install_flags", "install_flags",
"created", "created",
] ]
class SiteAuditSerializer(ModelSerializer):
class Meta:
model = Site
fields = "__all__"
class ClientAuditSerializer(ModelSerializer):
class Meta:
model = Client
fields = "__all__"

View File

@@ -3,10 +3,8 @@ import re
import uuid import uuid
import pytz import pytz
from django.conf import settings
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime from django.utils import timezone as djangotime
from loguru import logger
from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
@@ -26,8 +24,6 @@ from .serializers import (
SiteSerializer, SiteSerializer,
) )
logger.configure(**settings.LOG_CONFIG)
class GetAddClients(APIView): class GetAddClients(APIView):
permission_classes = [IsAuthenticated, ManageClientsPerms] permission_classes = [IsAuthenticated, ManageClientsPerms]

View File

@@ -2,6 +2,7 @@ from django.core.management.base import BaseCommand
from logs.models import PendingAction from logs.models import PendingAction
from scripts.models import Script from scripts.models import Script
from accounts.models import User
class Command(BaseCommand): class Command(BaseCommand):
@@ -13,3 +14,9 @@ class Command(BaseCommand):
# load community scripts into the db # load community scripts into the db
Script.load_community_scripts() Script.load_community_scripts()
# make sure installer users have block_dashboard_login set
if User.objects.filter(is_installer_user=True).exists():
for user in User.objects.filter(is_installer_user=True):
user.block_dashboard_login = True
user.save()

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.2.1 on 2021-07-07 18:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0023_coresettings_clear_faults_days'),
]
operations = [
migrations.AddField(
model_name='coresettings',
name='agent_history_prune_days',
field=models.PositiveIntegerField(default=30),
),
migrations.AddField(
model_name='coresettings',
name='resolved_alerts_prune_days',
field=models.PositiveIntegerField(default=0),
),
]

View File

@@ -0,0 +1,28 @@
# Generated by Django 3.2.1 on 2021-07-07 18:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0024_auto_20210707_1828'),
]
operations = [
migrations.AddField(
model_name='coresettings',
name='agent_debug_level',
field=models.CharField(choices=[('info', 'Info'), ('warning', 'Warning'), ('error', 'Error'), ('critical', 'Critical')], default='info', max_length=20),
),
migrations.AddField(
model_name='coresettings',
name='debug_log_prune_days',
field=models.PositiveIntegerField(default=30),
),
migrations.AlterField(
model_name='coresettings',
name='agent_history_prune_days',
field=models.PositiveIntegerField(default=60),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-07-21 17:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0025_auto_20210707_1835'),
]
operations = [
migrations.AddField(
model_name='coresettings',
name='audit_log_prune_days',
field=models.PositiveIntegerField(default=0),
),
]

View File

@@ -0,0 +1,73 @@
# Generated by Django 3.2.6 on 2021-09-05 16:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0026_coresettings_audit_log_prune_days'),
]
operations = [
migrations.AddField(
model_name='customfield',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='customfield',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='customfield',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='customfield',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='globalkvstore',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AddField(
model_name='urlaction',
name='created_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='urlaction',
name='created_time',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='urlaction',
name='modified_by',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='urlaction',
name='modified_time',
field=models.DateTimeField(auto_now=True, null=True),
),
]

View File

@@ -1,17 +1,15 @@
import smtplib import smtplib
from email.message import EmailMessage from email.message import EmailMessage
from django.db.models.enums import Choices
import pytz import pytz
from django.conf import settings from django.conf import settings
from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from django.db import models from django.db import models
from loguru import logger
from twilio.rest import Client as TwClient from twilio.rest import Client as TwClient
from logs.models import BaseAuditModel from logs.models import BaseAuditModel, DebugLog, LOG_LEVEL_CHOICES
logger.configure(**settings.LOG_CONFIG)
TZ_CHOICES = [(_, _) for _ in pytz.all_timezones] TZ_CHOICES = [(_, _) for _ in pytz.all_timezones]
@@ -51,6 +49,13 @@ class CoreSettings(BaseAuditModel):
) )
# removes check history older than days # removes check history older than days
check_history_prune_days = models.PositiveIntegerField(default=30) check_history_prune_days = models.PositiveIntegerField(default=30)
resolved_alerts_prune_days = models.PositiveIntegerField(default=0)
agent_history_prune_days = models.PositiveIntegerField(default=60)
debug_log_prune_days = models.PositiveIntegerField(default=30)
audit_log_prune_days = models.PositiveIntegerField(default=0)
agent_debug_level = models.CharField(
max_length=20, choices=LOG_LEVEL_CHOICES, default="info"
)
clear_faults_days = models.IntegerField(default=0) clear_faults_days = models.IntegerField(default=0)
mesh_token = models.CharField(max_length=255, null=True, blank=True, default="") mesh_token = models.CharField(max_length=255, null=True, blank=True, default="")
mesh_username = models.CharField(max_length=255, null=True, blank=True, default="") mesh_username = models.CharField(max_length=255, null=True, blank=True, default="")
@@ -184,14 +189,14 @@ class CoreSettings(BaseAuditModel):
server.quit() server.quit()
except Exception as e: except Exception as e:
logger.error(f"Sending email failed with error: {e}") DebugLog.error(message=f"Sending email failed with error: {e}")
if test: if test:
return str(e) return str(e)
else: else:
return True return True
def send_sms(self, body, alert_template=None): def send_sms(self, body, alert_template=None):
if not alert_template and not self.sms_is_configured: if not alert_template or not self.sms_is_configured:
return return
# override email recipients if alert_template is passed and is set # override email recipients if alert_template is passed and is set
@@ -205,7 +210,7 @@ class CoreSettings(BaseAuditModel):
try: try:
tw_client.messages.create(body=body, to=num, from_=self.twilio_number) tw_client.messages.create(body=body, to=num, from_=self.twilio_number)
except Exception as e: except Exception as e:
logger.error(f"SMS failed to send: {e}") DebugLog.error(message=f"SMS failed to send: {e}")
@staticmethod @staticmethod
def serialize(core): def serialize(core):
@@ -227,7 +232,7 @@ FIELD_TYPE_CHOICES = (
MODEL_CHOICES = (("client", "Client"), ("site", "Site"), ("agent", "Agent")) MODEL_CHOICES = (("client", "Client"), ("site", "Site"), ("agent", "Agent"))
class CustomField(models.Model): class CustomField(BaseAuditModel):
order = models.PositiveIntegerField(default=0) order = models.PositiveIntegerField(default=0)
model = models.CharField(max_length=25, choices=MODEL_CHOICES) model = models.CharField(max_length=25, choices=MODEL_CHOICES)
@@ -256,6 +261,12 @@ class CustomField(models.Model):
def __str__(self): def __str__(self):
return self.name return self.name
@staticmethod
def serialize(field):
from .serializers import CustomFieldSerializer
return CustomFieldSerializer(field).data
@property @property
def default_value(self): def default_value(self):
if self.type == "multiple": if self.type == "multiple":
@@ -265,6 +276,26 @@ class CustomField(models.Model):
else: else:
return self.default_value_string return self.default_value_string
def get_or_create_field_value(self, instance):
from agents.models import Agent, AgentCustomField
from clients.models import Client, ClientCustomField, Site, SiteCustomField
if isinstance(instance, Agent):
if AgentCustomField.objects.filter(field=self, agent=instance).exists():
return AgentCustomField.objects.get(field=self, agent=instance)
else:
return AgentCustomField.objects.create(field=self, agent=instance)
elif isinstance(instance, Client):
if ClientCustomField.objects.filter(field=self, client=instance).exists():
return ClientCustomField.objects.get(field=self, client=instance)
else:
return ClientCustomField.objects.create(field=self, client=instance)
elif isinstance(instance, Site):
if SiteCustomField.objects.filter(field=self, site=instance).exists():
return SiteCustomField.objects.get(field=self, site=instance)
else:
return SiteCustomField.objects.create(field=self, site=instance)
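The helper above does an exists/get/create dance per model; an equivalent sketch using Django's built-in get_or_create is shown below purely as a design alternative, it is not what this commit does.

# alternative sketch only; behaviorally equivalent to the helper above
def get_or_create_field_value(self, instance):
    from agents.models import Agent, AgentCustomField
    from clients.models import Client, ClientCustomField, Site, SiteCustomField

    if isinstance(instance, Agent):
        obj, _ = AgentCustomField.objects.get_or_create(field=self, agent=instance)
    elif isinstance(instance, Client):
        obj, _ = ClientCustomField.objects.get_or_create(field=self, client=instance)
    elif isinstance(instance, Site):
        obj, _ = SiteCustomField.objects.get_or_create(field=self, site=instance)
    else:
        raise ValueError("unsupported instance type")
    return obj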
class CodeSignToken(models.Model): class CodeSignToken(models.Model):
token = models.CharField(max_length=255, null=True, blank=True) token = models.CharField(max_length=255, null=True, blank=True)
@@ -279,19 +310,34 @@ class CodeSignToken(models.Model):
return "Code signing token" return "Code signing token"
class GlobalKVStore(models.Model): class GlobalKVStore(BaseAuditModel):
name = models.CharField(max_length=25) name = models.CharField(max_length=25)
value = models.TextField() value = models.TextField()
def __str__(self): def __str__(self):
return self.name return self.name
@staticmethod
def serialize(store):
from .serializers import KeyStoreSerializer
return KeyStoreSerializer(store).data
class URLAction(models.Model): class URLAction(BaseAuditModel):
name = models.CharField(max_length=25) name = models.CharField(max_length=25)
desc = models.CharField(max_length=100, null=True, blank=True) desc = models.CharField(max_length=100, null=True, blank=True)
pattern = models.TextField() pattern = models.TextField()
def __str__(self):
return self.name
@staticmethod
def serialize(action):
from .serializers import URLActionSerializer
return URLActionSerializer(action).data
RUN_ON_CHOICES = ( RUN_ON_CHOICES = (
("client", "Client"), ("client", "Client"),

View File

@@ -1,17 +1,15 @@
import pytz import pytz
from django.conf import settings
from django.utils import timezone as djangotime from django.utils import timezone as djangotime
from loguru import logger
from autotasks.models import AutomatedTask from autotasks.models import AutomatedTask
from autotasks.tasks import delete_win_task_schedule from autotasks.tasks import delete_win_task_schedule
from checks.tasks import prune_check_history from checks.tasks import prune_check_history
from agents.tasks import clear_faults_task from agents.tasks import clear_faults_task, prune_agent_history
from alerts.tasks import prune_resolved_alerts
from core.models import CoreSettings from core.models import CoreSettings
from logs.tasks import prune_debug_log, prune_audit_log
from tacticalrmm.celery import app from tacticalrmm.celery import app
logger.configure(**settings.LOG_CONFIG)
@app.task @app.task
def core_maintenance_tasks(): def core_maintenance_tasks():
@@ -32,18 +30,37 @@ def core_maintenance_tasks():
core = CoreSettings.objects.first() core = CoreSettings.objects.first()
# remove old CheckHistory data # remove old CheckHistory data
if core.check_history_prune_days > 0: if core.check_history_prune_days > 0: # type: ignore
prune_check_history.delay(core.check_history_prune_days) prune_check_history.delay(core.check_history_prune_days) # type: ignore
# remove old resolved alerts
if core.resolved_alerts_prune_days > 0: # type: ignore
prune_resolved_alerts.delay(core.resolved_alerts_prune_days) # type: ignore
# remove old agent history
if core.agent_history_prune_days > 0: # type: ignore
prune_agent_history.delay(core.agent_history_prune_days) # type: ignore
# remove old debug logs
if core.debug_log_prune_days > 0: # type: ignore
prune_debug_log.delay(core.debug_log_prune_days) # type: ignore
# remove old audit logs
if core.audit_log_prune_days > 0: # type: ignore
prune_audit_log.delay(core.audit_log_prune_days) # type: ignore
# clear faults # clear faults
if core.clear_faults_days > 0: if core.clear_faults_days > 0: # type: ignore
clear_faults_task.delay(core.clear_faults_days) clear_faults_task.delay(core.clear_faults_days) # type: ignore
@app.task @app.task
def cache_db_fields_task(): def cache_db_fields_task():
from agents.models import Agent from agents.models import Agent
for agent in Agent.objects.all(): for agent in Agent.objects.prefetch_related("winupdates", "pendingactions").only(
"pending_actions_count", "has_patches_pending", "pk"
):
agent.pending_actions_count = agent.pendingactions.filter( agent.pending_actions_count = agent.pendingactions.filter(
status="pending" status="pending"
).count() ).count()
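The prune_debug_log and prune_audit_log tasks dispatched above are imported from logs.tasks but not shown in this compare; a hedged sketch of what the debug-log variant might look like, assuming it filters on the DebugLog.entry_time field defined later in this diff:

# hypothetical sketch; the committed logs/tasks.py is not part of this excerpt
from django.utils import timezone as djangotime

from tacticalrmm.celery import app


@app.task
def prune_debug_log(older_than_days: int) -> str:
    from logs.models import DebugLog

    cutoff = djangotime.now() - djangotime.timedelta(days=older_than_days)
    DebugLog.objects.filter(entry_time__lt=cutoff).delete()
    return "ok"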

View File

@@ -3,7 +3,9 @@ import pprint
import re import re
from django.conf import settings from django.conf import settings
from django.db.models.fields import IPAddressField
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
from logs.models import AuditLog
from rest_framework import status from rest_framework import status
from rest_framework.decorators import api_view, permission_classes from rest_framework.decorators import api_view, permission_classes
from rest_framework.exceptions import ParseError from rest_framework.exceptions import ParseError
@@ -346,9 +348,18 @@ class RunURLAction(APIView):
from requests.utils import requote_uri from requests.utils import requote_uri
from agents.models import Agent from agents.models import Agent
from clients.models import Client, Site
from tacticalrmm.utils import replace_db_values from tacticalrmm.utils import replace_db_values
agent = get_object_or_404(Agent, pk=request.data["agent"]) if "agent" in request.data.keys():
instance = get_object_or_404(Agent, pk=request.data["agent"])
elif "site" in request.data.keys():
instance = get_object_or_404(Site, pk=request.data["site"])
elif "client" in request.data.keys():
instance = get_object_or_404(Client, pk=request.data["client"])
else:
return notify_error("received an incorrect request")
action = get_object_or_404(URLAction, pk=request.data["action"]) action = get_object_or_404(URLAction, pk=request.data["action"])
pattern = re.compile("\\{\\{([\\w\\s]+\\.[\\w\\s]+)\\}\\}") pattern = re.compile("\\{\\{([\\w\\s]+\\.[\\w\\s]+)\\}\\}")
@@ -356,10 +367,17 @@ class RunURLAction(APIView):
url_pattern = action.pattern url_pattern = action.pattern
for string in re.findall(pattern, action.pattern): for string in re.findall(pattern, action.pattern):
value = replace_db_values(string=string, agent=agent, quotes=False) value = replace_db_values(string=string, instance=instance, quotes=False)
url_pattern = re.sub("\\{\\{" + string + "\\}\\}", str(value), url_pattern) url_pattern = re.sub("\\{\\{" + string + "\\}\\}", str(value), url_pattern)
AuditLog.audit_url_action(
username=request.user.username,
urlaction=action,
instance=instance,
debug_info={"ip": request._client_ip},
)
return Response(requote_uri(url_pattern)) return Response(requote_uri(url_pattern))
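The placeholder substitution above is easy to exercise in isolation; the sketch below swaps replace_db_values for a stubbed lookup so the {{ model.field }} rendering can be run standalone (the stub and example values are illustrative only).

# standalone sketch of the {{ model.field }} substitution used by RunURLAction
import re

from requests.utils import requote_uri

PLACEHOLDER = re.compile(r"\{\{([\w\s]+\.[\w\s]+)\}\}")


def render_url(url_pattern: str, lookup) -> str:
    for found in PLACEHOLDER.findall(url_pattern):
        # lookup stands in for replace_db_values(string=found, instance=..., quotes=False)
        url_pattern = re.sub(r"\{\{" + found + r"\}\}", str(lookup(found)), url_pattern)
    return requote_uri(url_pattern)


print(render_url("https://ticket.example.com/{{agent.hostname}}", lambda key: "DESKTOP-01"))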

View File

@@ -1,6 +1,7 @@
from django.contrib import admin from django.contrib import admin
from .models import AuditLog, PendingAction from .models import AuditLog, PendingAction, DebugLog
admin.site.register(PendingAction) admin.site.register(PendingAction)
admin.site.register(AuditLog) admin.site.register(AuditLog)
admin.site.register(DebugLog)

View File

@@ -0,0 +1,68 @@
# Generated by Django 3.2.1 on 2021-06-14 18:35
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("logs", "0012_auto_20210228_0943"),
]
operations = [
migrations.AddField(
model_name="debuglog",
name="agent",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="debuglogs",
to="agents.agent",
),
),
migrations.AddField(
model_name="debuglog",
name="entry_time",
field=models.DateTimeField(
auto_now_add=True, default=django.utils.timezone.now
),
preserve_default=False,
),
migrations.AddField(
model_name="debuglog",
name="log_level",
field=models.CharField(
choices=[
("info", "Info"),
("warning", "Warning"),
("error", "Error"),
("critical", "Critical"),
],
default="info",
max_length=50,
),
),
migrations.AddField(
model_name="debuglog",
name="log_type",
field=models.CharField(
choices=[
("agent_update", "Agent Update"),
("agent_issues", "Agent Issues"),
("win_updates", "Windows Updates"),
("system_issues", "System Issues"),
("scripting", "Scripting"),
],
default="system_issues",
max_length=50,
),
),
migrations.AddField(
model_name="debuglog",
name="message",
field=models.TextField(blank=True, null=True),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-06-28 02:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0013_auto_20210614_1835'),
]
operations = [
migrations.AddField(
model_name='auditlog',
name='agent_id',
field=models.PositiveIntegerField(blank=True, null=True),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-07-21 04:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0014_auditlog_agent_id'),
]
operations = [
migrations.AlterField(
model_name='auditlog',
name='object_type',
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alert_template', 'Alert Template'), ('role', 'Role')], max_length=100),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.2.1 on 2021-07-21 17:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0015_alter_auditlog_object_type'),
]
operations = [
migrations.AlterField(
model_name='auditlog',
name='object_type',
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alerttemplate', 'Alert Template'), ('role', 'Role')], max_length=100),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.2.1 on 2021-07-31 17:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0016_alter_auditlog_object_type'),
]
operations = [
migrations.AddField(
model_name='pendingaction',
name='cancelable',
field=models.BooleanField(blank=True, default=False),
),
migrations.AlterField(
model_name='pendingaction',
name='action_type',
field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update'), ('chocoinstall', 'Chocolatey Software Install'), ('runcmd', 'Run Command'), ('runscript', 'Run Script'), ('runpatchscan', 'Run Patch Scan'), ('runpatchinstall', 'Run Patch Install')], max_length=255, null=True),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.2.6 on 2021-09-05 16:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0017_auto_20210731_1707'),
]
operations = [
migrations.AlterField(
model_name='auditlog',
name='action',
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command'), ('bulk_action', 'Bulk Action'), ('url_action', 'URL Action')], max_length=100),
),
migrations.AlterField(
model_name='auditlog',
name='object_type',
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk'), ('alerttemplate', 'Alert Template'), ('role', 'Role'), ('urlaction', 'URL Action'), ('keystore', 'Global Key Store'), ('customfield', 'Custom Field')], max_length=100),
),
]

View File

@@ -2,14 +2,24 @@ import datetime as dt
from abc import abstractmethod from abc import abstractmethod
from django.db import models from django.db import models
from tacticalrmm.middleware import get_debug_info, get_username from tacticalrmm.middleware import get_debug_info, get_username
def get_debug_level():
from core.models import CoreSettings
return CoreSettings.objects.first().agent_debug_level # type: ignore
ACTION_TYPE_CHOICES = [ ACTION_TYPE_CHOICES = [
("schedreboot", "Scheduled Reboot"), ("schedreboot", "Scheduled Reboot"),
("taskaction", "Scheduled Task Action"), # deprecated ("taskaction", "Scheduled Task Action"), # deprecated
("agentupdate", "Agent Update"), ("agentupdate", "Agent Update"),
("chocoinstall", "Chocolatey Software Install"), ("chocoinstall", "Chocolatey Software Install"),
("runcmd", "Run Command"),
("runscript", "Run Script"),
("runpatchscan", "Run Patch Scan"),
("runpatchinstall", "Run Patch Install"),
] ]
AUDIT_ACTION_TYPE_CHOICES = [ AUDIT_ACTION_TYPE_CHOICES = [
@@ -26,6 +36,7 @@ AUDIT_ACTION_TYPE_CHOICES = [
("execute_script", "Execute Script"), ("execute_script", "Execute Script"),
("execute_command", "Execute Command"), ("execute_command", "Execute Command"),
("bulk_action", "Bulk Action"), ("bulk_action", "Bulk Action"),
("url_action", "URL Action"),
] ]
AUDIT_OBJECT_TYPE_CHOICES = [ AUDIT_OBJECT_TYPE_CHOICES = [
@@ -40,6 +51,11 @@ AUDIT_OBJECT_TYPE_CHOICES = [
("automatedtask", "Automated Task"), ("automatedtask", "Automated Task"),
("coresettings", "Core Settings"), ("coresettings", "Core Settings"),
("bulk", "Bulk"), ("bulk", "Bulk"),
("alerttemplate", "Alert Template"),
("role", "Role"),
("urlaction", "URL Action"),
("keystore", "Global Key Store"),
("customfield", "Custom Field"),
] ]
STATUS_CHOICES = [ STATUS_CHOICES = [
@@ -51,6 +67,7 @@ STATUS_CHOICES = [
class AuditLog(models.Model): class AuditLog(models.Model):
username = models.CharField(max_length=100) username = models.CharField(max_length=100)
agent = models.CharField(max_length=255, null=True, blank=True) agent = models.CharField(max_length=255, null=True, blank=True)
agent_id = models.PositiveIntegerField(blank=True, null=True)
entry_time = models.DateTimeField(auto_now_add=True) entry_time = models.DateTimeField(auto_now_add=True)
action = models.CharField(max_length=100, choices=AUDIT_ACTION_TYPE_CHOICES) action = models.CharField(max_length=100, choices=AUDIT_ACTION_TYPE_CHOICES)
object_type = models.CharField(max_length=100, choices=AUDIT_OBJECT_TYPE_CHOICES) object_type = models.CharField(max_length=100, choices=AUDIT_OBJECT_TYPE_CHOICES)
@@ -73,24 +90,25 @@ class AuditLog(models.Model):
return super(AuditLog, self).save(*args, **kwargs) return super(AuditLog, self).save(*args, **kwargs)
@staticmethod @staticmethod
def audit_mesh_session(username, hostname, debug_info={}): def audit_mesh_session(username, agent, debug_info={}):
AuditLog.objects.create( AuditLog.objects.create(
username=username, username=username,
agent=hostname, agent=agent.hostname,
agent_id=agent.id,
object_type="agent", object_type="agent",
action="remote_session", action="remote_session",
message=f"{username} used Mesh Central to initiate a remote session to {hostname}.", message=f"{username} used Mesh Central to initiate a remote session to {agent.hostname}.",
debug_info=debug_info, debug_info=debug_info,
) )
@staticmethod @staticmethod
def audit_raw_command(username, hostname, cmd, shell, debug_info={}): def audit_raw_command(username, agent, cmd, shell, debug_info={}):
AuditLog.objects.create( AuditLog.objects.create(
username=username, username=username,
agent=hostname, agent=agent.hostname,
object_type="agent", object_type="agent",
action="execute_command", action="execute_command",
message=f"{username} issued {shell} command on {hostname}.", message=f"{username} issued {shell} command on {agent.hostname}.",
after_value=cmd, after_value=cmd,
debug_info=debug_info, debug_info=debug_info,
) )
@@ -102,6 +120,7 @@ class AuditLog(models.Model):
AuditLog.objects.create( AuditLog.objects.create(
username=username, username=username,
object_type=object_type, object_type=object_type,
agent_id=before["id"] if object_type == "agent" else None,
action="modify", action="modify",
message=f"{username} modified {object_type} {name}", message=f"{username} modified {object_type} {name}",
before_value=before, before_value=before,
@@ -114,6 +133,7 @@ class AuditLog(models.Model):
AuditLog.objects.create( AuditLog.objects.create(
username=username, username=username,
object_type=object_type, object_type=object_type,
agent=after["id"] if object_type == "agent" else None,
action="add", action="add",
message=f"{username} added {object_type} {name}", message=f"{username} added {object_type} {name}",
after_value=after, after_value=after,
@@ -125,6 +145,7 @@ class AuditLog(models.Model):
AuditLog.objects.create( AuditLog.objects.create(
username=username, username=username,
object_type=object_type, object_type=object_type,
agent=before["id"] if object_type == "agent" else None,
action="delete", action="delete",
message=f"{username} deleted {object_type} {name}", message=f"{username} deleted {object_type} {name}",
before_value=before, before_value=before,
@@ -132,13 +153,14 @@ class AuditLog(models.Model):
) )
@staticmethod @staticmethod
def audit_script_run(username, hostname, script, debug_info={}): def audit_script_run(username, agent, script, debug_info={}):
AuditLog.objects.create( AuditLog.objects.create(
agent=hostname, agent=agent.hostname,
agent_id=agent.id,
username=username, username=username,
object_type="agent", object_type="agent",
action="execute_script", action="execute_script",
message=f'{username} ran script: "{script}" on {hostname}', message=f'{username} ran script: "{script}" on {agent.hostname}',
debug_info=debug_info, debug_info=debug_info,
) )
@@ -172,6 +194,21 @@ class AuditLog(models.Model):
debug_info=debug_info, debug_info=debug_info,
) )
@staticmethod
def audit_url_action(username, urlaction, instance, debug_info={}):
name = instance.hostname if hasattr(instance, "hostname") else instance.name
classname = type(instance).__name__
AuditLog.objects.create(
username=username,
agent=instance.hostname if classname == "Agent" else None,
agent_id=instance.id if classname == "Agent" else None,
object_type=classname.lower(),
action="url_action",
message=f"{username} ran url action: {urlaction.pattern} on {classname}: {name}",
debug_info=debug_info,
)
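A minimal usage sketch of the new audit_url_action hook, assuming a caller that runs a URL action against an Agent; the calling view shown here is hypothetical and not part of this diff.
from logs.models import AuditLog

def run_url_action_sketch(request, agent, url_action):
    # hypothetical caller: records who ran which URL action and against what;
    # for an Agent instance both the hostname and agent_id get captured
    AuditLog.audit_url_action(
        username=request.user.username,
        urlaction=url_action,
        instance=agent,
        debug_info={},
    )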
@staticmethod @staticmethod
def audit_bulk_action(username, action, affected, debug_info={}): def audit_bulk_action(username, action, affected, debug_info={}):
from agents.models import Agent from agents.models import Agent
@@ -190,13 +227,13 @@ class AuditLog(models.Model):
site = Site.objects.get(pk=affected["site"]) site = Site.objects.get(pk=affected["site"])
target = f"on all agents within site: {site.client.name}\\{site.name}" target = f"on all agents within site: {site.client.name}\\{site.name}"
elif affected["target"] == "agents": elif affected["target"] == "agents":
agents = Agent.objects.filter(pk__in=affected["agentPKs"]).values_list( agents = Agent.objects.filter(pk__in=affected["agents"]).values_list(
"hostname", flat=True "hostname", flat=True
) )
target = "on multiple agents" target = "on multiple agents"
if action == "script": if action == "script":
script = Script.objects.get(pk=affected["scriptPK"]) script = Script.objects.get(pk=affected["script"])
action = f"script: {script.name}" action = f"script: {script.name}"
if agents: if agents:
@@ -212,8 +249,71 @@ class AuditLog(models.Model):
) )
LOG_LEVEL_CHOICES = [
("info", "Info"),
("warning", "Warning"),
("error", "Error"),
("critical", "Critical"),
]
LOG_TYPE_CHOICES = [
("agent_update", "Agent Update"),
("agent_issues", "Agent Issues"),
("win_updates", "Windows Updates"),
("system_issues", "System Issues"),
("scripting", "Scripting"),
]
class DebugLog(models.Model): class DebugLog(models.Model):
pass entry_time = models.DateTimeField(auto_now_add=True)
agent = models.ForeignKey(
"agents.Agent",
related_name="debuglogs",
on_delete=models.CASCADE,
null=True,
blank=True,
)
log_level = models.CharField(
max_length=50, choices=LOG_LEVEL_CHOICES, default="info"
)
log_type = models.CharField(
max_length=50, choices=LOG_TYPE_CHOICES, default="system_issues"
)
message = models.TextField(null=True, blank=True)
@classmethod
def info(
cls,
message,
agent=None,
log_type="system_issues",
):
if get_debug_level() in ["info"]:
cls.objects.create(
log_level="info", agent=agent, log_type=log_type, message=message
)
@classmethod
def warning(cls, message, agent=None, log_type="system_issues"):
if get_debug_level() in ["info", "warning"]:
cls.objects.create(
log_level="warning", agent=agent, log_type=log_type, message=message
)
@classmethod
def error(cls, message, agent=None, log_type="system_issues"):
if get_debug_level() in ["info", "warning", "error"]:
cls.objects.create(
log_level="error", agent=agent, log_type=log_type, message=message
)
@classmethod
def critical(cls, message, agent=None, log_type="system_issues"):
if get_debug_level() in ["info", "warning", "error", "critical"]:
cls.objects.create(
log_level="critical", agent=agent, log_type=log_type, message=message
)
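A hedged sketch of how the new level-gated DebugLog helpers might be called from agent-facing code; get_debug_level() is defined elsewhere in the codebase, and do_update() below is purely hypothetical.
from logs.models import DebugLog

def sketch_agent_update(agent):
    # persisted only when get_debug_level() returns "info"
    DebugLog.info(f"Requesting update on {agent.hostname}", agent=agent, log_type="agent_update")
    try:
        do_update(agent)  # hypothetical helper, not part of this diff
    except Exception as e:
        # persisted when the debug level is "info", "warning" or "error"
        DebugLog.error(f"Update failed on {agent.hostname}: {e}", agent=agent, log_type="agent_update")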
class PendingAction(models.Model): class PendingAction(models.Model):
@@ -232,6 +332,7 @@ class PendingAction(models.Model):
choices=STATUS_CHOICES, choices=STATUS_CHOICES,
default="pending", default="pending",
) )
cancelable = models.BooleanField(blank=True, default=False)
celery_id = models.CharField(null=True, blank=True, max_length=255) celery_id = models.CharField(null=True, blank=True, max_length=255)
details = models.JSONField(null=True, blank=True) details = models.JSONField(null=True, blank=True)
@@ -247,6 +348,8 @@ class PendingAction(models.Model):
return "Next update cycle" return "Next update cycle"
elif self.action_type == "chocoinstall": elif self.action_type == "chocoinstall":
return "ASAP" return "ASAP"
else:
return "On next checkin"
@property @property
def description(self): def description(self):
@@ -259,6 +362,14 @@ class PendingAction(models.Model):
elif self.action_type == "chocoinstall": elif self.action_type == "chocoinstall":
return f"{self.details['name']} software install" return f"{self.details['name']} software install"
elif self.action_type in [
"runcmd",
"runscript",
"runpatchscan",
"runpatchinstall",
]:
return f"{self.action_type}"
class BaseAuditModel(models.Model): class BaseAuditModel(models.Model):
# abstract base class for auditing models # abstract base class for auditing models
@@ -275,13 +386,14 @@ class BaseAuditModel(models.Model):
def serialize(): def serialize():
pass pass
def save(self, *args, **kwargs): def save(self, old_model=None, *args, **kwargs):
if get_username(): if get_username():
before_value = {}
object_class = type(self) object_class = type(self)
object_name = object_class.__name__.lower() object_name = object_class.__name__.lower()
username = get_username() username = get_username()
after_value = object_class.serialize(self) # type: ignore
# populate created_by and modified_by fields on instance # populate created_by and modified_by fields on instance
if not getattr(self, "created_by", None): if not getattr(self, "created_by", None):
@@ -289,32 +401,37 @@ class BaseAuditModel(models.Model):
if hasattr(self, "modified_by"): if hasattr(self, "modified_by"):
self.modified_by = username self.modified_by = username
# capture object properties before edit
if self.pk:
before_value = object_class.objects.get(pk=self.id)
# dont create entry for agent add since that is done in view # dont create entry for agent add since that is done in view
if not self.pk: if not self.pk:
AuditLog.audit_object_add( AuditLog.audit_object_add(
username, username,
object_name, object_name,
object_class.serialize(self), after_value, # type: ignore
self.__str__(), self.__str__(),
debug_info=get_debug_info(), debug_info=get_debug_info(),
) )
else: else:
AuditLog.audit_object_changed(
username,
object_class.__name__.lower(),
object_class.serialize(before_value),
object_class.serialize(self),
self.__str__(),
debug_info=get_debug_info(),
)
return super(BaseAuditModel, self).save(*args, **kwargs) if old_model:
before_value = object_class.serialize(old_model) # type: ignore
else:
before_value = object_class.serialize(object_class.objects.get(pk=self.pk)) # type: ignore
# only create an audit entry if the values have changed
if before_value != after_value: # type: ignore
AuditLog.audit_object_changed(
username,
object_class.__name__.lower(),
before_value,
after_value, # type: ignore
self.__str__(),
debug_info=get_debug_info(),
)
super(BaseAuditModel, self).save(*args, **kwargs)
def delete(self, *args, **kwargs): def delete(self, *args, **kwargs):
super(BaseAuditModel, self).delete(*args, **kwargs)
if get_username(): if get_username():
@@ -322,9 +439,7 @@ class BaseAuditModel(models.Model):
AuditLog.audit_object_delete( AuditLog.audit_object_delete(
get_username(), get_username(),
object_class.__name__.lower(), object_class.__name__.lower(),
object_class.serialize(self), object_class.serialize(self), # type: ignore
self.__str__(), self.__str__(),
debug_info=get_debug_info(), debug_info=get_debug_info(),
) )
return super(BaseAuditModel, self).delete(*args, **kwargs)
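A hedged sketch of the reworked save() contract, using Script (a BaseAuditModel subclass) only as an example; passing old_model skips the extra objects.get() inside save(), and no audit entry is written when the serialized values are unchanged.
from scripts.models import Script

script = Script.objects.get(pk=1)      # pk value is illustrative
snapshot = Script.objects.get(pk=1)    # pre-edit copy to pass as old_model
script.description = "updated description"
# audit_object_changed() fires because serialize(snapshot) != serialize(script)
# (auditing only runs when get_username() resolves a request username)
script.save(old_model=snapshot)
# saving again with nothing changed writes no audit entry (before == after)
script.save()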

View File

@@ -1,25 +1,23 @@
from rest_framework import serializers from rest_framework import serializers
from tacticalrmm.utils import get_default_timezone
from .models import AuditLog, PendingAction from .models import AuditLog, DebugLog, PendingAction
class AuditLogSerializer(serializers.ModelSerializer): class AuditLogSerializer(serializers.ModelSerializer):
entry_time = serializers.SerializerMethodField(read_only=True) entry_time = serializers.SerializerMethodField(read_only=True)
ip_address = serializers.ReadOnlyField(source="debug_info.ip")
class Meta: class Meta:
model = AuditLog model = AuditLog
fields = "__all__" fields = "__all__"
def get_entry_time(self, log): def get_entry_time(self, log):
timezone = get_default_timezone() tz = self.context["default_tz"]
return log.entry_time.astimezone(timezone).strftime("%m %d %Y %H:%M:%S") return log.entry_time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")
class PendingActionSerializer(serializers.ModelSerializer): class PendingActionSerializer(serializers.ModelSerializer):
hostname = serializers.ReadOnlyField(source="agent.hostname") hostname = serializers.ReadOnlyField(source="agent.hostname")
salt_id = serializers.ReadOnlyField(source="agent.salt_id") salt_id = serializers.ReadOnlyField(source="agent.salt_id")
client = serializers.ReadOnlyField(source="agent.client.name") client = serializers.ReadOnlyField(source="agent.client.name")
@@ -30,3 +28,16 @@ class PendingActionSerializer(serializers.ModelSerializer):
class Meta: class Meta:
model = PendingAction model = PendingAction
fields = "__all__" fields = "__all__"
class DebugLogSerializer(serializers.ModelSerializer):
agent = serializers.ReadOnlyField(source="agent.hostname")
entry_time = serializers.SerializerMethodField(read_only=True)
class Meta:
model = DebugLog
fields = "__all__"
def get_entry_time(self, log):
tz = self.context["default_tz"]
return log.entry_time.astimezone(tz).strftime("%m %d %Y %H:%M:%S")
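A short example of the new serializer contract: callers now pass the timezone through context instead of the serializer looking it up per row (the queryset slice is illustrative).
from logs.models import AuditLog
from logs.serializers import AuditLogSerializer
from tacticalrmm.utils import get_default_timezone

ctx = {"default_tz": get_default_timezone()}
logs = AuditLog.objects.order_by("-entry_time")[:25]
data = AuditLogSerializer(logs, many=True, context=ctx).data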

View File

@@ -0,0 +1,25 @@
from django.utils import timezone as djangotime
from tacticalrmm.celery import app
@app.task
def prune_debug_log(older_than_days: int) -> str:
from .models import DebugLog
DebugLog.objects.filter(
entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
).delete()
return "ok"
@app.task
def prune_audit_log(older_than_days: int) -> str:
from .models import AuditLog
AuditLog.objects.filter(
entry_time__lt=djangotime.now() - djangotime.timedelta(days=older_than_days)
).delete()
return "ok"
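For reference, a hedged example of invoking the new prune tasks by hand; how they get scheduled periodically is not shown in this diff.
from logs.tasks import prune_debug_log, prune_audit_log

# queue asynchronously through celery
prune_debug_log.delay(30)   # remove DebugLog rows older than 30 days
prune_audit_log.delay(90)   # remove AuditLog rows older than 90 days

# or run synchronously, e.g. from a management shell
prune_debug_log(30)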

View File

@@ -1,10 +1,11 @@
from datetime import datetime, timedelta from itertools import cycle
from unittest.mock import patch from unittest.mock import patch
from django.utils import timezone as djangotime
from model_bakery import baker, seq from model_bakery import baker, seq
from tacticalrmm.test import TacticalTestCase
from logs.models import PendingAction from logs.models import PendingAction
from tacticalrmm.test import TacticalTestCase
class TestAuditViews(TacticalTestCase): class TestAuditViews(TacticalTestCase):
@@ -16,20 +17,23 @@ class TestAuditViews(TacticalTestCase):
# create clients for client filter # create clients for client filter
site = baker.make("clients.Site") site = baker.make("clients.Site")
baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1") agent1 = baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
agent2 = baker.make_recipe("agents.agent", hostname="AgentHostname2")
agent0 = baker.make_recipe("agents.agent", hostname="AgentHostname")
# user jim agent logs # user jim agent logs
baker.make_recipe( baker.make_recipe(
"logs.agent_logs", "logs.agent_logs",
username="jim", username="jim",
agent="AgentHostname1", agent="AgentHostname1",
entry_time=seq(datetime.now(), timedelta(days=3)), agent_id=agent1.id,
_quantity=15, _quantity=15,
) )
baker.make_recipe( baker.make_recipe(
"logs.agent_logs", "logs.agent_logs",
username="jim", username="jim",
agent="AgentHostname2", agent="AgentHostname2",
entry_time=seq(datetime.now(), timedelta(days=100)), agent_id=agent2.id,
_quantity=8, _quantity=8,
) )
@@ -38,14 +42,14 @@ class TestAuditViews(TacticalTestCase):
"logs.agent_logs", "logs.agent_logs",
username="james", username="james",
agent="AgentHostname1", agent="AgentHostname1",
entry_time=seq(datetime.now(), timedelta(days=55)), agent_id=agent1.id,
_quantity=7, _quantity=7,
) )
baker.make_recipe( baker.make_recipe(
"logs.agent_logs", "logs.agent_logs",
username="james", username="james",
agent="AgentHostname2", agent="AgentHostname2",
entry_time=seq(datetime.now(), timedelta(days=20)), agent_id=agent2.id,
_quantity=10, _quantity=10,
) )
@@ -53,7 +57,7 @@ class TestAuditViews(TacticalTestCase):
baker.make_recipe( baker.make_recipe(
"logs.agent_logs", "logs.agent_logs",
agent=seq("AgentHostname"), agent=seq("AgentHostname"),
entry_time=seq(datetime.now(), timedelta(days=29)), agent_id=seq(agent1.id),
_quantity=5, _quantity=5,
) )
@@ -61,7 +65,6 @@ class TestAuditViews(TacticalTestCase):
baker.make_recipe( baker.make_recipe(
"logs.object_logs", "logs.object_logs",
username="james", username="james",
entry_time=seq(datetime.now(), timedelta(days=5)),
_quantity=17, _quantity=17,
) )
@@ -69,7 +72,6 @@ class TestAuditViews(TacticalTestCase):
baker.make_recipe( baker.make_recipe(
"logs.login_logs", "logs.login_logs",
username="james", username="james",
entry_time=seq(datetime.now(), timedelta(days=7)),
_quantity=11, _quantity=11,
) )
@@ -77,51 +79,62 @@ class TestAuditViews(TacticalTestCase):
baker.make_recipe( baker.make_recipe(
"logs.login_logs", "logs.login_logs",
username="jim", username="jim",
entry_time=seq(datetime.now(), timedelta(days=11)),
_quantity=13, _quantity=13,
) )
return site return {"site": site, "agents": [agent0, agent1, agent2]}
def test_get_audit_logs(self): def test_get_audit_logs(self):
url = "/logs/auditlogs/" url = "/logs/auditlogs/"
# create data # create data
site = self.create_audit_records() data = self.create_audit_records()
# test data and result counts # test data and result counts
data = [ data = [
{"filter": {"timeFilter": 30}, "count": 86}, {"filter": {"timeFilter": 30}, "count": 86},
{ {
"filter": {"timeFilter": 45, "agentFilter": ["AgentHostname2"]}, "filter": {
"timeFilter": 45,
"agentFilter": [data["agents"][2].id],
},
"count": 19, "count": 19,
}, },
{ {
"filter": {"userFilter": ["jim"], "agentFilter": ["AgentHostname1"]}, "filter": {
"userFilter": ["jim"],
"agentFilter": [data["agents"][1].id],
},
"count": 15, "count": 15,
}, },
{ {
"filter": { "filter": {
"timeFilter": 180, "timeFilter": 180,
"userFilter": ["james"], "userFilter": ["james"],
"agentFilter": ["AgentHostname1"], "agentFilter": [data["agents"][1].id],
}, },
"count": 7, "count": 7,
}, },
{"filter": {}, "count": 86}, {"filter": {}, "count": 86},
{"filter": {"agentFilter": ["DoesntExist"]}, "count": 0}, {"filter": {"agentFilter": [500]}, "count": 0},
{ {
"filter": { "filter": {
"timeFilter": 35, "timeFilter": 35,
"userFilter": ["james", "jim"], "userFilter": ["james", "jim"],
"agentFilter": ["AgentHostname1", "AgentHostname2"], "agentFilter": [
data["agents"][1].id,
data["agents"][2].id,
],
}, },
"count": 40, "count": 40,
}, },
{"filter": {"timeFilter": 35, "userFilter": ["james", "jim"]}, "count": 81}, {"filter": {"timeFilter": 35, "userFilter": ["james", "jim"]}, "count": 81},
{"filter": {"objectFilter": ["user"]}, "count": 26}, {"filter": {"objectFilter": ["user"]}, "count": 26},
{"filter": {"actionFilter": ["login"]}, "count": 12}, {"filter": {"actionFilter": ["login"]}, "count": 12},
{"filter": {"clientFilter": [site.client.id]}, "count": 23}, {
"filter": {"clientFilter": [data["site"].client.id]},
"count": 23,
},
] ]
pagination = { pagination = {
@@ -137,45 +150,15 @@ class TestAuditViews(TacticalTestCase):
) )
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
self.assertEqual( self.assertEqual(
len(resp.data["audit_logs"]), len(resp.data["audit_logs"]), # type:ignore
pagination["rowsPerPage"] pagination["rowsPerPage"]
if req["count"] > pagination["rowsPerPage"] if req["count"] > pagination["rowsPerPage"]
else req["count"], else req["count"],
) )
self.assertEqual(resp.data["total"], req["count"]) self.assertEqual(resp.data["total"], req["count"]) # type:ignore
self.check_not_authenticated("patch", url) self.check_not_authenticated("patch", url)
def test_options_filter(self):
url = "/logs/auditlogs/optionsfilter/"
baker.make_recipe("agents.agent", hostname=seq("AgentHostname"), _quantity=5)
baker.make_recipe("agents.agent", hostname=seq("Server"), _quantity=3)
baker.make("accounts.User", username=seq("Username"), _quantity=7)
baker.make("accounts.User", username=seq("soemthing"), _quantity=3)
data = [
{"req": {"type": "agent", "pattern": "AgeNt"}, "count": 5},
{"req": {"type": "agent", "pattern": "AgentHostname1"}, "count": 1},
{"req": {"type": "agent", "pattern": "hasjhd"}, "count": 0},
{"req": {"type": "user", "pattern": "UsEr"}, "count": 7},
{"req": {"type": "user", "pattern": "UserName1"}, "count": 1},
{"req": {"type": "user", "pattern": "dfdsadf"}, "count": 0},
]
for req in data:
resp = self.client.post(url, req["req"], format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), req["count"])
# test for invalid payload. needs to have either type: user or agent
invalid_data = {"type": "object", "pattern": "SomeString"}
resp = self.client.post(url, invalid_data, format="json")
self.assertEqual(resp.status_code, 400)
self.check_not_authenticated("post", url)
def test_get_pending_actions(self): def test_get_pending_actions(self):
url = "/logs/pendingactions/" url = "/logs/pendingactions/"
agent1 = baker.make_recipe("agents.online_agent") agent1 = baker.make_recipe("agents.online_agent")
@@ -270,3 +253,87 @@ class TestAuditViews(TacticalTestCase):
self.assertEqual(r.data, "error deleting sched task") # type: ignore self.assertEqual(r.data, "error deleting sched task") # type: ignore
self.check_not_authenticated("delete", url) self.check_not_authenticated("delete", url)
def test_get_debug_log(self):
url = "/logs/debuglog/"
# create data
agent = baker.make_recipe("agents.agent")
baker.make(
"logs.DebugLog",
log_level=cycle(["error", "info", "warning", "critical"]),
log_type="agent_issues",
agent=agent,
_quantity=4,
)
logs = baker.make(
"logs.DebugLog",
log_type="system_issues",
log_level=cycle(["error", "info", "warning", "critical"]),
_quantity=15,
)
# test agent filter
data = {"agentFilter": agent.id}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 4) # type: ignore
# test log type filter and agent
data = {"agentFilter": agent.id, "logLevelFilter": "warning"}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 1) # type: ignore
# test time filter with other
data = {"logTypeFilter": "system_issues", "logLevelFilter": "error"}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(len(resp.data), 4) # type: ignore
self.check_not_authenticated("patch", url)
class TestLogTasks(TacticalTestCase):
def test_prune_debug_log(self):
from .models import DebugLog
from .tasks import prune_debug_log
# setup data
debug_log = baker.make(
"logs.DebugLog",
_quantity=50,
)
days = 0
for item in debug_log: # type:ignore
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
item.save()
days = days + 5
# delete DebugLog entries older than 30 days
prune_debug_log(30)
self.assertEqual(DebugLog.objects.count(), 6)
def test_prune_audit_log(self):
from .models import AuditLog
from .tasks import prune_audit_log
# setup data
audit_log = baker.make(
"logs.AuditLog",
_quantity=50,
)
days = 0
for item in audit_log: # type:ignore
item.entry_time = djangotime.now() - djangotime.timedelta(days=days)
item.save()
days = days + 5
# delete AuditLog entries older than 30 days
prune_audit_log(30)
self.assertEqual(AuditLog.objects.count(), 6)

View File

@@ -5,7 +5,5 @@ from . import views
urlpatterns = [ urlpatterns = [
path("pendingactions/", views.PendingActions.as_view()), path("pendingactions/", views.PendingActions.as_view()),
path("auditlogs/", views.GetAuditLogs.as_view()), path("auditlogs/", views.GetAuditLogs.as_view()),
path("auditlogs/optionsfilter/", views.FilterOptionsAuditLog.as_view()), path("debuglog/", views.GetDebugLog.as_view()),
path("debuglog/<mode>/<hostname>/<order>/", views.debug_log),
path("downloadlog/", views.download_log),
] ]

View File

@@ -1,28 +1,23 @@
import asyncio import asyncio
import subprocess
from datetime import datetime as dt from datetime import datetime as dt
from django.conf import settings
from django.core.paginator import Paginator
from django.db.models import Q
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from accounts.models import User from accounts.models import User
from accounts.serializers import UserSerializer from accounts.serializers import UserSerializer
from agents.models import Agent from agents.models import Agent
from agents.serializers import AgentHostnameSerializer from agents.serializers import AgentHostnameSerializer
from tacticalrmm.utils import notify_error from django.core.paginator import Paginator
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from tacticalrmm.utils import notify_error, get_default_timezone
from .models import AuditLog, PendingAction from .models import AuditLog, PendingAction, DebugLog
from .permissions import AuditLogPerms, DebugLogPerms, ManagePendingActionPerms from .permissions import AuditLogPerms, DebugLogPerms, ManagePendingActionPerms
from .serializers import AuditLogSerializer, PendingActionSerializer from .serializers import AuditLogSerializer, DebugLogSerializer, PendingActionSerializer
class GetAuditLogs(APIView): class GetAuditLogs(APIView):
@@ -48,7 +43,7 @@ class GetAuditLogs(APIView):
timeFilter = Q() timeFilter = Q()
if "agentFilter" in request.data: if "agentFilter" in request.data:
agentFilter = Q(agent__in=request.data["agentFilter"]) agentFilter = Q(agent_id__in=request.data["agentFilter"])
elif "clientFilter" in request.data: elif "clientFilter" in request.data:
clients = Client.objects.filter( clients = Client.objects.filter(
@@ -84,36 +79,18 @@ class GetAuditLogs(APIView):
).order_by(order_by) ).order_by(order_by)
paginator = Paginator(audit_logs, pagination["rowsPerPage"]) paginator = Paginator(audit_logs, pagination["rowsPerPage"])
ctx = {"default_tz": get_default_timezone()}
return Response( return Response(
{ {
"audit_logs": AuditLogSerializer( "audit_logs": AuditLogSerializer(
paginator.get_page(pagination["page"]), many=True paginator.get_page(pagination["page"]), many=True, context=ctx
).data, ).data,
"total": paginator.count, "total": paginator.count,
} }
) )
class FilterOptionsAuditLog(APIView):
permission_classes = [IsAuthenticated, AuditLogPerms]
def post(self, request):
if request.data["type"] == "agent":
agents = Agent.objects.filter(hostname__icontains=request.data["pattern"])
return Response(AgentHostnameSerializer(agents, many=True).data)
if request.data["type"] == "user":
users = User.objects.filter(
username__icontains=request.data["pattern"],
agent=None,
is_installer_user=False,
)
return Response(UserSerializer(users, many=True).data)
return Response("error", status=status.HTTP_400_BAD_REQUEST)
class PendingActions(APIView): class PendingActions(APIView):
permission_classes = [IsAuthenticated, ManagePendingActionPerms] permission_classes = [IsAuthenticated, ManagePendingActionPerms]
@@ -158,60 +135,30 @@ class PendingActions(APIView):
return Response(f"{action.agent.hostname}: {action.description} was cancelled") return Response(f"{action.agent.hostname}: {action.description} was cancelled")
@api_view() class GetDebugLog(APIView):
@permission_classes([IsAuthenticated, DebugLogPerms]) permission_classes = [IsAuthenticated, DebugLogPerms]
def debug_log(request, mode, hostname, order):
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
agents = Agent.objects.prefetch_related("site").only("pk", "hostname") def patch(self, request):
agent_hostnames = AgentHostnameSerializer(agents, many=True) agentFilter = Q()
logTypeFilter = Q()
logLevelFilter = Q()
switch_mode = { if "logTypeFilter" in request.data:
"info": "INFO", logTypeFilter = Q(log_type=request.data["logTypeFilter"])
"critical": "CRITICAL",
"error": "ERROR",
"warning": "WARNING",
}
level = switch_mode.get(mode, "INFO")
if hostname == "all" and order == "latest": if "logLevelFilter" in request.data:
cmd = f"grep -h {level} {log_file} | tac" logLevelFilter = Q(log_level=request.data["logLevelFilter"])
elif hostname == "all" and order == "oldest":
cmd = f"grep -h {level} {log_file}"
elif hostname != "all" and order == "latest":
cmd = f"grep {hostname} {log_file} | grep -h {level} | tac"
elif hostname != "all" and order == "oldest":
cmd = f"grep {hostname} {log_file} | grep -h {level}"
else:
return Response("error", status=status.HTTP_400_BAD_REQUEST)
contents = subprocess.run( if "agentFilter" in request.data:
cmd, agentFilter = Q(agent=request.data["agentFilter"])
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
shell=True,
)
if not contents.stdout: debug_logs = (
resp = f"No {mode} logs" DebugLog.objects.prefetch_related("agent")
else: .filter(logLevelFilter)
resp = contents.stdout .filter(agentFilter)
.filter(logTypeFilter)
)
return Response({"log": resp, "agents": agent_hostnames.data}) ctx = {"default_tz": get_default_timezone()}
ret = DebugLogSerializer(debug_logs, many=True, context=ctx).data
return Response(ret)
@api_view()
@permission_classes([IsAuthenticated, DebugLogPerms])
def download_log(request):
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
if settings.DEBUG:
with open(log_file, "rb") as f:
response = HttpResponse(f.read(), content_type="text/plain")
response["Content-Disposition"] = "attachment; filename=debug.log"
return response
else:
response = HttpResponse()
response["Content-Disposition"] = "attachment; filename=debug.log"
response["X-Accel-Redirect"] = "/private/log/debug.log"
return response
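Based on the filters in the new GetDebugLog view, a hedged example of the payload the PATCH /logs/debuglog/ endpoint accepts (values are illustrative; every key is optional).
payload = {
    "agentFilter": 12,                # an Agent primary key
    "logTypeFilter": "agent_update",  # one of LOG_TYPE_CHOICES
    "logLevelFilter": "error",        # one of LOG_LEVEL_CHOICES
}
# e.g. with a DRF test client:
# resp = self.client.patch("/logs/debuglog/", payload, format="json")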

View File

@@ -1,21 +1,22 @@
asgiref==3.3.4 asgiref==3.4.1
asyncio-nats-client==0.11.4 asyncio-nats-client==0.11.4
celery==5.1.1 celery==5.1.2
certifi==2021.5.30 certifi==2021.5.30
cffi==1.14.5 cffi==1.14.6
channels==3.0.3 channels==3.0.4
channels_redis==3.2.0 channels_redis==3.3.0
chardet==4.0.0 chardet==4.0.0
cryptography==3.4.7 cryptography==3.4.8
daphne==3.0.2 daphne==3.0.2
Django==3.2.4 Django==3.2.7
django-cors-headers==3.7.0 django-cors-headers==3.8.0
django-ipware==3.0.2
django-rest-knox==4.1.0 django-rest-knox==4.1.0
djangorestframework==3.12.4 djangorestframework==3.12.4
future==0.18.2 future==0.18.2
loguru==0.5.3 loguru==0.5.3
msgpack==1.0.2 msgpack==1.0.2
packaging==20.9 packaging==21.0
psycopg2-binary==2.9.1 psycopg2-binary==2.9.1
pycparser==2.20 pycparser==2.20
pycryptodome==3.10.1 pycryptodome==3.10.1
@@ -24,13 +25,13 @@ pyparsing==2.4.7
pytz==2021.1 pytz==2021.1
qrcode==6.1 qrcode==6.1
redis==3.5.3 redis==3.5.3
requests==2.25.1 requests==2.26.0
six==1.16.0 six==1.16.0
sqlparse==0.4.1 sqlparse==0.4.1
twilio==6.60.0 twilio==6.63.1
urllib3==1.26.5 urllib3==1.26.6
uWSGI==2.0.19.1 uWSGI==2.0.19.1
validators==0.18.2 validators==0.18.2
vine==5.0.0 vine==5.0.0
websockets==9.1 websockets==9.1
zipp==3.4.1 zipp==3.5.0

View File

@@ -175,11 +175,29 @@
"name": "Screenconnect - Get GUID for client", "name": "Screenconnect - Get GUID for client",
"description": "Returns Screenconnect GUID for client - Use with Custom Fields for later use. ", "description": "Returns Screenconnect GUID for client - Use with Custom Fields for later use. ",
"args": [ "args": [
"-serviceName {{client.ScreenConnectService}}" "{{client.ScreenConnectService}}"
], ],
"shell": "powershell", "shell": "powershell",
"category": "TRMM (Win):Collectors" "category": "TRMM (Win):Collectors"
}, },
{
"guid": "9cfdfe8f-82bf-4081-a59f-576d694f4649",
"filename": "Win_Teamviewer_Get_ID.ps1",
"submittedBy": "https://github.com/silversword411",
"name": "TeamViewer - Get ClientID for client",
"description": "Returns Teamviwer ClientID for client - Use with Custom Fields for later use. ",
"shell": "powershell",
"category": "TRMM (Win):Collectors"
},
{
"guid": "e43081d4-6f71-4ce3-881a-22da749f7a57",
"filename": "Win_AnyDesk_Get_Anynet_ID.ps1",
"submittedBy": "https://github.com/meuchels",
"name": "AnyDesk - Get AnyNetID for client",
"description": "Returns AnyNetID for client - Use with Custom Fields for later use. ",
"shell": "powershell",
"category": "TRMM (Win):Collectors"
},
{ {
"guid": "95a2ee6f-b89b-4551-856e-3081b041caa7", "guid": "95a2ee6f-b89b-4551-856e-3081b041caa7",
"filename": "Win_Power_Profile_Reset_High_Performance_to_Defaults.ps1", "filename": "Win_Power_Profile_Reset_High_Performance_to_Defaults.ps1",
@@ -226,6 +244,30 @@
"shell": "powershell", "shell": "powershell",
"category": "TRMM (Win):3rd Party Software" "category": "TRMM (Win):3rd Party Software"
}, },
{
"guid": "907652a5-9ec1-4759-9871-a7743f805ff2",
"filename": "Win_Software_Uninstall.ps1",
"submittedBy": "https://github.com/subzdev",
"name": "Software Uninstaller - list, find, and uninstall most software",
"description": "Allows listing, finding and uninstalling most software on Windows. There will be a best effort to uninstall silently if the silent uninstall string is not provided.",
"shell": "powershell",
"category": "TRMM (Win):3rd Party Software",
"default_timeout": "600"
},
{
"guid": "64c3b1a8-c85f-4800-85a3-485f78a2d9ad",
"filename": "Win_Bitdefender_GravityZone_Install.ps1",
"submittedBy": "https://github.com/jhtechIL/",
"name": "BitDefender Gravity Zone Install",
"description": "Installs BitDefender Gravity Zone, requires client custom field setup. See script comments for details",
"args": [
"-url {{client.bdurl}}",
"-exe {{client.bdexe}}"
],
"default_timeout": "2500",
"shell": "powershell",
"category": "TRMM (Win):3rd Party Software"
},
{ {
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5", "guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
"filename": "Win_Defender_Enable.ps1", "filename": "Win_Defender_Enable.ps1",
@@ -254,6 +296,16 @@
"shell": "cmd", "shell": "cmd",
"category": "TRMM (Win):Windows Features" "category": "TRMM (Win):Windows Features"
}, },
{
"guid": "0afd8d00-b95b-4318-8d07-0b9bc4424287",
"filename": "Win_Feature_NET35_Enable.ps1",
"submittedBy": "https://github.com/silversword411",
"name": "Windows Feature - Enable .NET 3.5",
"description": "Enables the Windows .NET 3.5 Framework in Turn Features on and off",
"shell": "powershell",
"default_timeout": "300",
"category": "TRMM (Win):Windows Features"
},
{ {
"guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940", "guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940",
"filename": "Win_Speedtest.ps1", "filename": "Win_Speedtest.ps1",
@@ -368,14 +420,14 @@
"category": "TRMM (Win):Other" "category": "TRMM (Win):Other"
}, },
{ {
"guid": "5615aa90-0272-427b-8acf-0ca019612501", "guid": "6c78eb04-57ae-43b0-98ed-cbd3ef9e2f80",
"filename": "Win_Chocolatey_Update_Installed.bat", "filename": "Win_Chocolatey_Manage_Apps_Bulk.ps1",
"submittedBy": "https://github.com/silversword411", "submittedBy": "https://github.com/silversword411",
"name": "Update Installed Apps", "name": "Chocolatey - Install, Uninstall and Upgrade Software",
"description": "Update all apps that were installed using Chocolatey.", "description": "This script installs, uninstalls and updates software using Chocolatey with logic to slow tasks to minimize hitting community limits. Mode install/uninstall/upgrade Hosts x",
"shell": "cmd", "shell": "powershell",
"category": "TRMM (Win):3rd Party Software>Chocolatey", "category": "TRMM (Win):3rd Party Software>Chocolatey",
"default_timeout": "3600" "default_timeout": "600"
}, },
{ {
"guid": "fff8024d-d72e-4457-84fa-6c780f69a16f", "guid": "fff8024d-d72e-4457-84fa-6c780f69a16f",
@@ -450,6 +502,16 @@
"shell": "powershell", "shell": "powershell",
"category": "TRMM (Win):Updates" "category": "TRMM (Win):Updates"
}, },
{
"guid": "93038ae0-58ce-433e-a3b9-bc99ad1ea79a",
"filename": "Win_Services_AutomaticStartup_Running.ps1",
"submittedBy": "https://github.com/silversword411",
"name": "Ensure all services with startup type Automatic are running",
"description": "Gets a list of all service with startup type of Automatic but aren't running and tries to start them",
"shell": "powershell",
"default_timeout": "300",
"category": "TRMM (Win):Updates"
},
{ {
"guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8", "guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8",
"filename": "Win_ScreenConnectAIO.ps1", "filename": "Win_ScreenConnectAIO.ps1",
@@ -507,6 +569,16 @@
"category": "TRMM (Win):Network", "category": "TRMM (Win):Network",
"default_timeout": "90" "default_timeout": "90"
}, },
{
"guid": "7c0c7e37-60ff-462f-9c34-b5cd4c4796a7",
"filename": "Win_Wifi_SSID_and_Password_Retrieval.ps1",
"submittedBy": "https://github.com/silversword411",
"name": "Network Wireless - Retrieve Saved passwords",
"description": "Returns all saved wifi passwords stored on the computer",
"shell": "powershell",
"category": "TRMM (Win):Network",
"default_timeout": "90"
},
{ {
"guid": "abe78170-7cf9-435b-9666-c5ef6c11a106", "guid": "abe78170-7cf9-435b-9666-c5ef6c11a106",
"filename": "Win_Network_IPv6_Disable.ps1", "filename": "Win_Network_IPv6_Disable.ps1",
@@ -527,6 +599,16 @@
"category": "TRMM (Win):Network", "category": "TRMM (Win):Network",
"default_timeout": "90" "default_timeout": "90"
}, },
{
"guid": "5676acca-44e5-46c8-af61-ae795ecb3ef1",
"filename": "Win_Network_IP_DHCP_Renew.bat",
"submittedBy": "https://github.com/silversword411",
"name": "Network - Release and Renew IP",
"description": "Trigger and release and renew of IP address on all network adapters",
"shell": "cmd",
"category": "TRMM (Win):Network",
"default_timeout": "90"
},
{ {
"guid": "83aa4d51-63ce-41e7-829f-3c16e6115bbf", "guid": "83aa4d51-63ce-41e7-829f-3c16e6115bbf",
"filename": "Win_Network_DNS_Set_to_1.1.1.2.ps1", "filename": "Win_Network_DNS_Set_to_1.1.1.2.ps1",
@@ -557,6 +639,16 @@
"category": "TRMM (Win):Other", "category": "TRMM (Win):Other",
"default_timeout": "90" "default_timeout": "90"
}, },
{
"guid": "43e65e5f-717a-4b6d-a724-1a86229fcd42",
"filename": "Win_Activation_Check.ps1",
"submittedBy": "https://github.com/dinger1986",
"name": "Windows Activation check",
"description": "Checks to see if windows is activated and returns status",
"shell": "powershell",
"category": "TRMM (Win):Other",
"default_timeout": "120"
},
{ {
"guid": "83f6c6ea-6120-4fd3-bec8-d3abc505dcdf", "guid": "83f6c6ea-6120-4fd3-bec8-d3abc505dcdf",
"filename": "Win_TRMM_Start_Menu_Delete_Shortcut.ps1", "filename": "Win_TRMM_Start_Menu_Delete_Shortcut.ps1",

View File

@@ -0,0 +1,22 @@
# Generated by Django 3.2.1 on 2021-07-21 19:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scripts', '0008_script_guid'),
]
operations = [
migrations.CreateModel(
name='ScriptSnippet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=40)),
('code', models.TextField()),
('shell', models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], max_length=15)),
],
),
]

View File

@@ -0,0 +1,33 @@
# Generated by Django 3.2.1 on 2021-07-26 16:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scripts', '0009_scriptsnippet'),
]
operations = [
migrations.AddField(
model_name='scriptsnippet',
name='desc',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='script',
name='code_base64',
field=models.TextField(blank=True, default='', null=True),
),
migrations.AlterField(
model_name='script',
name='description',
field=models.TextField(blank=True, default='', null=True),
),
migrations.AlterField(
model_name='scriptsnippet',
name='name',
field=models.CharField(max_length=40, unique=True),
),
]

View File

@@ -0,0 +1,28 @@
# Generated by Django 3.2.1 on 2021-07-31 17:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scripts', '0010_auto_20210726_1634'),
]
operations = [
migrations.AlterField(
model_name='scriptsnippet',
name='code',
field=models.TextField(default=''),
),
migrations.AlterField(
model_name='scriptsnippet',
name='desc',
field=models.CharField(blank=True, default='', max_length=50),
),
migrations.AlterField(
model_name='scriptsnippet',
name='shell',
field=models.CharField(choices=[('powershell', 'Powershell'), ('cmd', 'Batch (CMD)'), ('python', 'Python')], default='powershell', max_length=15),
),
]

View File

@@ -1,12 +1,10 @@
import base64 import base64
import re import re
from typing import List, Optional from typing import List
from django.conf import settings
from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields import ArrayField
from django.db import models from django.db import models
from loguru import logger from django.db.models.fields import CharField, TextField
from logs.models import BaseAuditModel from logs.models import BaseAuditModel
from tacticalrmm.utils import replace_db_values from tacticalrmm.utils import replace_db_values
@@ -21,13 +19,11 @@ SCRIPT_TYPES = [
("builtin", "Built In"), ("builtin", "Built In"),
] ]
logger.configure(**settings.LOG_CONFIG)
class Script(BaseAuditModel): class Script(BaseAuditModel):
guid = name = models.CharField(max_length=64, null=True, blank=True) guid = models.CharField(max_length=64, null=True, blank=True)
name = models.CharField(max_length=255) name = models.CharField(max_length=255)
description = models.TextField(null=True, blank=True) description = models.TextField(null=True, blank=True, default="")
filename = models.CharField(max_length=255) # deprecated filename = models.CharField(max_length=255) # deprecated
shell = models.CharField( shell = models.CharField(
max_length=100, choices=SCRIPT_SHELLS, default="powershell" max_length=100, choices=SCRIPT_SHELLS, default="powershell"
@@ -43,20 +39,44 @@ class Script(BaseAuditModel):
) )
favorite = models.BooleanField(default=False) favorite = models.BooleanField(default=False)
category = models.CharField(max_length=100, null=True, blank=True) category = models.CharField(max_length=100, null=True, blank=True)
code_base64 = models.TextField(null=True, blank=True) code_base64 = models.TextField(null=True, blank=True, default="")
default_timeout = models.PositiveIntegerField(default=90) default_timeout = models.PositiveIntegerField(default=90)
def __str__(self): def __str__(self):
return self.name return self.name
@property @property
def code(self): def code_no_snippets(self):
if self.code_base64: if self.code_base64:
base64_bytes = self.code_base64.encode("ascii", "ignore") return base64.b64decode(self.code_base64.encode("ascii", "ignore")).decode(
return base64.b64decode(base64_bytes).decode("ascii", "ignore") "ascii", "ignore"
)
else: else:
return "" return ""
@property
def code(self):
return self.replace_with_snippets(self.code_no_snippets)
@classmethod
def replace_with_snippets(cls, code):
# check if snippet has been added to script body
matches = re.finditer(r"{{(.*)}}", code)
if matches:
replaced_code = code
for snippet in matches:
snippet_name = snippet.group(1).strip()
if ScriptSnippet.objects.filter(name=snippet_name).exists():
value = ScriptSnippet.objects.get(name=snippet_name).code
else:
value = ""
replaced_code = re.sub(snippet.group(), value, replaced_code)
return replaced_code
else:
return code
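A quick illustration of the snippet expansion above; the snippet name and code are made up.
from scripts.models import Script, ScriptSnippet

ScriptSnippet.objects.create(
    name="header", code="Write-Output 'start'", shell="powershell"
)
body = "{{header}}\nWrite-Output 'rest of script'"
Script.replace_with_snippets(body)
# -> "Write-Output 'start'\nWrite-Output 'rest of script'"
# unknown snippet names are replaced with an empty string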
@classmethod @classmethod
def load_community_scripts(cls): def load_community_scripts(cls):
import json import json
@@ -97,20 +117,20 @@ class Script(BaseAuditModel):
if s.exists(): if s.exists():
i = s.first() i = s.first()
i.name = script["name"] i.name = script["name"] # type: ignore
i.description = script["description"] i.description = script["description"] # type: ignore
i.category = category i.category = category # type: ignore
i.shell = script["shell"] i.shell = script["shell"] # type: ignore
i.default_timeout = default_timeout i.default_timeout = default_timeout # type: ignore
i.args = args i.args = args # type: ignore
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f: with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
script_bytes = ( script_bytes = (
f.read().decode("utf-8").encode("ascii", "ignore") f.read().decode("utf-8").encode("ascii", "ignore")
) )
i.code_base64 = base64.b64encode(script_bytes).decode("ascii") i.code_base64 = base64.b64encode(script_bytes).decode("ascii") # type: ignore
i.save( i.save( # type: ignore
update_fields=[ update_fields=[
"name", "name",
"description", "description",
@@ -175,7 +195,6 @@ class Script(BaseAuditModel):
guid=script["guid"], guid=script["guid"],
name=script["name"], name=script["name"],
description=script["description"], description=script["description"],
filename=script["filename"],
shell=script["shell"], shell=script["shell"],
script_type="builtin", script_type="builtin",
category=category, category=category,
@@ -209,7 +228,7 @@ class Script(BaseAuditModel):
if match: if match:
# only get the match between the () in regex # only get the match between the () in regex
string = match.group(1) string = match.group(1)
value = replace_db_values(string=string, agent=agent, shell=shell) value = replace_db_values(string=string, instance=agent, shell=shell)
if value: if value:
temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg))
@@ -221,3 +240,13 @@ class Script(BaseAuditModel):
temp_args.append(arg) temp_args.append(arg)
return temp_args return temp_args
class ScriptSnippet(models.Model):
name = CharField(max_length=40, unique=True)
desc = CharField(max_length=50, blank=True, default="")
code = TextField(default="")
shell = CharField(max_length=15, choices=SCRIPT_SHELLS, default="powershell")
def __str__(self):
return self.name

View File

@@ -1,6 +1,6 @@
from rest_framework.serializers import ModelSerializer, ReadOnlyField from rest_framework.serializers import ModelSerializer, ReadOnlyField
from .models import Script from .models import Script, ScriptSnippet
class ScriptTableSerializer(ModelSerializer): class ScriptTableSerializer(ModelSerializer):
@@ -41,3 +41,9 @@ class ScriptCheckSerializer(ModelSerializer):
class Meta: class Meta:
model = Script model = Script
fields = ["code", "shell"] fields = ["code", "shell"]
class ScriptSnippetSerializer(ModelSerializer):
class Meta:
model = ScriptSnippet
fields = "__all__"

View File

@@ -1,12 +1,16 @@
import asyncio import asyncio
from agents.models import Agent from packaging import version as pyver
from agents.models import Agent, AgentHistory
from scripts.models import Script from scripts.models import Script
from tacticalrmm.celery import app from tacticalrmm.celery import app
@app.task @app.task
def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None: def handle_bulk_command_task(
agentpks, cmd, shell, timeout, username, run_on_offline=False
) -> None:
nats_data = { nats_data = {
"func": "rawcmd", "func": "rawcmd",
"timeout": timeout, "timeout": timeout,
@@ -16,11 +20,31 @@ def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
}, },
} }
for agent in Agent.objects.filter(pk__in=agentpks): for agent in Agent.objects.filter(pk__in=agentpks):
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
hist = AgentHistory.objects.create(
agent=agent,
type="cmd_run",
command=cmd,
username=username,
)
nats_data["id"] = hist.pk
asyncio.run(agent.nats_cmd(nats_data, wait=False)) asyncio.run(agent.nats_cmd(nats_data, wait=False))
@app.task @app.task
def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None: def handle_bulk_script_task(scriptpk, agentpks, args, timeout, username) -> None:
script = Script.objects.get(pk=scriptpk) script = Script.objects.get(pk=scriptpk)
for agent in Agent.objects.filter(pk__in=agentpks): for agent in Agent.objects.filter(pk__in=agentpks):
agent.run_script(scriptpk=script.pk, args=args, timeout=timeout) history_pk = 0
if pyver.parse(agent.version) >= pyver.parse("1.6.0"):
hist = AgentHistory.objects.create(
agent=agent,
type="script_run",
script=script,
username=username,
)
history_pk = hist.pk
agent.run_script(
scriptpk=script.pk, args=args, timeout=timeout, history_pk=history_pk
)
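A hedged sketch of how a caller might queue the reworked bulk tasks with the new username argument; the import path, view context and argument values are assumptions, not part of this diff.
# from agents.tasks import handle_bulk_command_task, handle_bulk_script_task  # path assumed

handle_bulk_command_task.delay(
    agent_pks,               # list of Agent primary keys
    "ipconfig /flushdns",    # cmd
    "cmd",                   # shell
    30,                      # timeout
    request.user.username,   # username, now recorded in AgentHistory for 1.6.0+ agents
    run_on_offline=False,
)
handle_bulk_script_task.delay(script_pk, agent_pks, ["-Verbose"], 300, request.user.username)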

View File

@@ -1,15 +1,18 @@
import json import json
import os import os
from pathlib import Path from pathlib import Path
from unittest.mock import patch
from django.conf import settings from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from model_bakery import baker from model_bakery import baker
from tacticalrmm.test import TacticalTestCase from tacticalrmm.test import TacticalTestCase
from .models import Script from .models import Script, ScriptSnippet
from .serializers import ScriptSerializer, ScriptTableSerializer from .serializers import (
ScriptSerializer,
ScriptTableSerializer,
ScriptSnippetSerializer,
)
class TestScriptViews(TacticalTestCase): class TestScriptViews(TacticalTestCase):
@@ -18,7 +21,7 @@ class TestScriptViews(TacticalTestCase):
self.authenticate() self.authenticate()
def test_get_scripts(self): def test_get_scripts(self):
url = "/scripts/scripts/" url = "/scripts/"
scripts = baker.make("scripts.Script", _quantity=3) scripts = baker.make("scripts.Script", _quantity=3)
serializer = ScriptTableSerializer(scripts, many=True) serializer = ScriptTableSerializer(scripts, many=True)
@@ -29,14 +32,14 @@ class TestScriptViews(TacticalTestCase):
self.check_not_authenticated("get", url) self.check_not_authenticated("get", url)
def test_add_script(self): def test_add_script(self):
url = f"/scripts/scripts/" url = f"/scripts/"
data = { data = {
"name": "Name", "name": "Name",
"description": "Description", "description": "Description",
"shell": "powershell", "shell": "powershell",
"category": "New", "category": "New",
"code": "Some Test Code\nnew Line", "code_base64": "VGVzdA==", # Test
"default_timeout": 99, "default_timeout": 99,
"args": ["hello", "world", r"{{agent.public_ip}}"], "args": ["hello", "world", r"{{agent.public_ip}}"],
"favorite": False, "favorite": False,
@@ -46,47 +49,24 @@ class TestScriptViews(TacticalTestCase):
resp = self.client.post(url, data, format="json") resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
self.assertTrue(Script.objects.filter(name="Name").exists()) self.assertTrue(Script.objects.filter(name="Name").exists())
self.assertEqual(Script.objects.get(name="Name").code, data["code"]) self.assertEqual(Script.objects.get(name="Name").code, "Test")
# test with file upload
# file with 'Test' as content
file = SimpleUploadedFile(
"test_script.bat", b"\x54\x65\x73\x74", content_type="text/plain"
)
data = {
"name": "New Name",
"description": "Description",
"shell": "cmd",
"category": "New",
"filename": file,
"default_timeout": 4455,
"args": json.dumps(
["hello", "world", r"{{agent.public_ip}}"]
), # simulate javascript's JSON.stringify() for formData
}
# test with file upload
resp = self.client.post(url, data, format="multipart")
self.assertEqual(resp.status_code, 200)
script = Script.objects.filter(name="New Name").first()
self.assertEquals(script.code, "Test")
self.check_not_authenticated("post", url) self.check_not_authenticated("post", url)
def test_modify_script(self): def test_modify_script(self):
# test a call where script doesn't exist # test a call where script doesn't exist
resp = self.client.put("/scripts/500/script/", format="json") resp = self.client.put("/scripts/500/", format="json")
self.assertEqual(resp.status_code, 404) self.assertEqual(resp.status_code, 404)
# make a userdefined script # make a userdefined script
script = baker.make_recipe("scripts.script") script = baker.make_recipe("scripts.script")
url = f"/scripts/{script.pk}/script/" url = f"/scripts/{script.pk}/"
data = { data = {
"name": script.name, "name": script.name,
"description": "Description Change", "description": "Description Change",
"shell": script.shell, "shell": script.shell,
"code": "Test Code\nAnother Line", "code_base64": "VGVzdA==", # Test
"default_timeout": 13344556, "default_timeout": 13344556,
} }
@@ -95,16 +75,18 @@ class TestScriptViews(TacticalTestCase):
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
script = Script.objects.get(pk=script.pk) script = Script.objects.get(pk=script.pk)
self.assertEquals(script.description, "Description Change") self.assertEquals(script.description, "Description Change")
self.assertEquals(script.code, "Test Code\nAnother Line") self.assertEquals(script.code, "Test")
# test edit a builtin script # test edit a builtin script
data = {"name": "New Name", "description": "New Desc", "code": "Some New Code"} data = {
"name": "New Name",
"description": "New Desc",
"code_base64": "VGVzdA==",
} # Test
builtin_script = baker.make_recipe("scripts.script", script_type="builtin") builtin_script = baker.make_recipe("scripts.script", script_type="builtin")
resp = self.client.put( resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
f"/scripts/{builtin_script.pk}/script/", data, format="json"
)
self.assertEqual(resp.status_code, 400) self.assertEqual(resp.status_code, 400)
data = { data = {
@@ -112,13 +94,11 @@ class TestScriptViews(TacticalTestCase):
"description": "Description Change", "description": "Description Change",
"shell": script.shell, "shell": script.shell,
"favorite": True, "favorite": True,
"code": "Test Code\nAnother Line", "code_base64": "VGVzdA==", # Test
"default_timeout": 54345, "default_timeout": 54345,
} }
# test marking a builtin script as favorite # test marking a builtin script as favorite
resp = self.client.put( resp = self.client.put(f"/scripts/{builtin_script.pk}/", data, format="json")
f"/scripts/{builtin_script.pk}/script/", data, format="json"
)
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
self.assertTrue(Script.objects.get(pk=builtin_script.pk).favorite) self.assertTrue(Script.objects.get(pk=builtin_script.pk).favorite)
@@ -126,11 +106,11 @@ class TestScriptViews(TacticalTestCase):
def test_get_script(self): def test_get_script(self):
# test a call where script doesn't exist # test a call where script doesn't exist
resp = self.client.get("/scripts/500/script/", format="json") resp = self.client.get("/scripts/500/", format="json")
self.assertEqual(resp.status_code, 404) self.assertEqual(resp.status_code, 404)
script = baker.make("scripts.Script") script = baker.make("scripts.Script")
url = f"/scripts/{script.pk}/script/" # type: ignore url = f"/scripts/{script.pk}/" # type: ignore
serializer = ScriptSerializer(script) serializer = ScriptSerializer(script)
resp = self.client.get(url, format="json") resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
@@ -138,14 +118,34 @@ class TestScriptViews(TacticalTestCase):
self.check_not_authenticated("get", url) self.check_not_authenticated("get", url)
@patch("agents.models.Agent.nats_cmd")
def test_test_script(self, run_script):
url = "/scripts/testscript/"
run_script.return_value = "return value"
agent = baker.make_recipe("agents.agent")
data = {
"agent": agent.pk,
"code": "some_code",
"timeout": 90,
"args": [],
"shell": "powershell",
}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, "return value") # type: ignore
self.check_not_authenticated("post", url)
def test_delete_script(self): def test_delete_script(self):
# test a call where script doesn't exist # test a call where script doesn't exist
resp = self.client.delete("/scripts/500/script/", format="json") resp = self.client.delete("/scripts/500/", format="json")
self.assertEqual(resp.status_code, 404) self.assertEqual(resp.status_code, 404)
# test delete script # test delete script
script = baker.make_recipe("scripts.script") script = baker.make_recipe("scripts.script")
url = f"/scripts/{script.pk}/script/" url = f"/scripts/{script.pk}/"
resp = self.client.delete(url, format="json") resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
@@ -153,7 +153,7 @@ class TestScriptViews(TacticalTestCase):
# test delete community script # test delete community script
script = baker.make_recipe("scripts.script", script_type="builtin") script = baker.make_recipe("scripts.script", script_type="builtin")
url = f"/scripts/{script.pk}/script/" url = f"/scripts/{script.pk}/"
resp = self.client.delete(url, format="json") resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 400) self.assertEqual(resp.status_code, 400)
@@ -161,7 +161,7 @@ class TestScriptViews(TacticalTestCase):
def test_download_script(self): def test_download_script(self):
# test a call where script doesn't exist # test a call where script doesn't exist
resp = self.client.get("/scripts/500/download/", format="json") resp = self.client.get("/scripts/download/500/", format="json")
self.assertEqual(resp.status_code, 404) self.assertEqual(resp.status_code, 404)
# return script code property should be "Test" # return script code property should be "Test"
@@ -170,7 +170,7 @@ class TestScriptViews(TacticalTestCase):
script = baker.make( script = baker.make(
"scripts.Script", code_base64="VGVzdA==", shell="powershell" "scripts.Script", code_base64="VGVzdA==", shell="powershell"
) )
url = f"/scripts/{script.pk}/download/" # type: ignore url = f"/scripts/download/{script.pk}/" # type: ignore
resp = self.client.get(url, format="json") resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
@@ -178,7 +178,7 @@ class TestScriptViews(TacticalTestCase):
# test batch file # test batch file
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd") script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
url = f"/scripts/{script.pk}/download/" # type: ignore url = f"/scripts/download/{script.pk}/" # type: ignore
resp = self.client.get(url, format="json") resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
@@ -186,7 +186,7 @@ class TestScriptViews(TacticalTestCase):
# test python file # test python file
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python") script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
url = f"/scripts/{script.pk}/download/" # type: ignore url = f"/scripts/download/{script.pk}/" # type: ignore
resp = self.client.get(url, format="json") resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200) self.assertEqual(resp.status_code, 200)
@@ -497,3 +497,106 @@ class TestScriptViews(TacticalTestCase):
["-Parameter", "-Another $True"], ["-Parameter", "-Another $True"],
Script.parse_script_args(agent=agent, shell="powershell", args=args), Script.parse_script_args(agent=agent, shell="powershell", args=args),
) )
class TestScriptSnippetViews(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
self.authenticate()
def test_get_script_snippets(self):
url = "/scripts/snippets/"
snippets = baker.make("scripts.ScriptSnippet", _quantity=3)
serializer = ScriptSnippetSerializer(snippets, many=True)
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(serializer.data, resp.data) # type: ignore
self.check_not_authenticated("get", url)
def test_add_script_snippet(self):
url = f"/scripts/snippets/"
data = {
"name": "Name",
"description": "Description",
"shell": "powershell",
"code": "Test",
}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
self.assertTrue(ScriptSnippet.objects.filter(name="Name").exists())
self.check_not_authenticated("post", url)
def test_modify_script_snippet(self):
# test a call where script doesn't exist
resp = self.client.put("/scripts/snippets/500/", format="json")
self.assertEqual(resp.status_code, 404)
# make a userdefined script
snippet = baker.make("scripts.ScriptSnippet", name="Test")
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
data = {"name": "New Name"} # type: ignore
resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
snippet = ScriptSnippet.objects.get(pk=snippet.pk) # type: ignore
self.assertEquals(snippet.name, "New Name")
self.check_not_authenticated("put", url)
def test_get_script_snippet(self):
# test a call where script doesn't exist
resp = self.client.get("/scripts/snippets/500/", format="json")
self.assertEqual(resp.status_code, 404)
snippet = baker.make("scripts.ScriptSnippet")
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
serializer = ScriptSnippetSerializer(snippet)
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertEqual(serializer.data, resp.data) # type: ignore
self.check_not_authenticated("get", url)
def test_delete_script_snippet(self):
# test a call where script doesn't exist
resp = self.client.delete("/scripts/snippets/500/", format="json")
self.assertEqual(resp.status_code, 404)
# test delete script snippet
snippet = baker.make("scripts.ScriptSnippet")
url = f"/scripts/snippets/{snippet.pk}/" # type: ignore
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertFalse(ScriptSnippet.objects.filter(pk=snippet.pk).exists()) # type: ignore
self.check_not_authenticated("delete", url)
def test_snippet_replacement(self):
snippet1 = baker.make(
"scripts.ScriptSnippet", name="snippet1", code="Snippet 1 Code"
)
snippet2 = baker.make(
"scripts.ScriptSnippet", name="snippet2", code="Snippet 2 Code"
)
test_no_snippet = "No Snippets Here"
test_with_snippet = "Snippet 1: {{snippet1}}\nSnippet 2: {{snippet2}}"
# test putting snippet in text
result = Script.replace_with_snippets(test_with_snippet)
self.assertEqual(
result,
f"Snippet 1: {snippet1.code}\nSnippet 2: {snippet2.code}", # type:ignore
)
# test text with no snippets
result = Script.replace_with_snippets(test_no_snippet)
self.assertEqual(result, test_no_snippet)
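The snippet tests above rely on Script.replace_with_snippets swapping each {{name}} placeholder for that snippet's stored code. The model method itself is not part of this diff; a minimal sketch of how such an expansion could work (an assumption, not the project's actual implementation):

import re

from scripts.models import ScriptSnippet


def replace_with_snippets(code: str) -> str:
    # Replace each {{snippet_name}} placeholder with that snippet's code.
    # Unknown names are left untouched. Sketch only -- the real
    # Script.replace_with_snippets is not shown in this diff.
    def lookup(match: re.Match) -> str:
        snippet = ScriptSnippet.objects.filter(name=match.group(1)).first()
        return snippet.code if snippet else match.group(0)

    return re.sub(r"{{\s*([\w-]+)\s*}}", lookup, code)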

View File

@@ -3,7 +3,10 @@ from django.urls import path
from . import views
urlpatterns = [
- path("scripts/", views.GetAddScripts.as_view()),
- path("<int:pk>/script/", views.GetUpdateDeleteScript.as_view()),
- path("<int:pk>/download/", views.download),
+ path("", views.GetAddScripts.as_view()),
+ path("<int:pk>/", views.GetUpdateDeleteScript.as_view()),
+ path("snippets/", views.GetAddScriptSnippets.as_view()),
+ path("snippets/<int:pk>/", views.GetUpdateDeleteScriptSnippet.as_view()),
+ path("testscript/", views.TestScript.as_view()),
+ path("download/<int:pk>/", views.download),
]
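With the app-level prefixes dropped and the snippet, test-script, and download routes added, the endpoints line up with the URLs exercised in the tests above. A quick sketch of hitting them with DRF's test client, assuming the app is still included under /scripts/ at the project level and some_user is any authenticated user (both assumptions, not shown in this diff):

from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=some_user)  # hypothetical user object

client.get("/scripts/")             # GetAddScripts.get -> list of scripts
client.post(
    "/scripts/snippets/",
    {"name": "cleanup", "shell": "powershell", "code": "Write-Output 'hi'"},
    format="json",
)                                   # GetAddScriptSnippets.post
client.get("/scripts/download/1/")  # download(pk=1)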

View File

@@ -1,64 +1,39 @@
import base64
- import json
+ import asyncio
- from django.conf import settings
from django.shortcuts import get_object_or_404
- from loguru import logger
from rest_framework.decorators import api_view, permission_classes
- from rest_framework.parsers import FileUploadParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from tacticalrmm.utils import notify_error
- from .models import Script
+ from .models import Script, ScriptSnippet
from .permissions import ManageScriptsPerms
- from .serializers import ScriptSerializer, ScriptTableSerializer
- logger.configure(**settings.LOG_CONFIG)
+ from agents.permissions import RunScriptPerms
+ from .serializers import (
+ ScriptSerializer,
+ ScriptTableSerializer,
+ ScriptSnippetSerializer,
+ )
class GetAddScripts(APIView):
permission_classes = [IsAuthenticated, ManageScriptsPerms]
- parser_class = (FileUploadParser,)
def get(self, request):
- showCommunityScripts = request.GET.get("showCommunityScripts", True)
- if not showCommunityScripts or showCommunityScripts == "false":
- scripts = Script.objects.filter(script_type="userdefined")
- else:
- scripts = Script.objects.all()
+ scripts = Script.objects.all()
return Response(ScriptTableSerializer(scripts, many=True).data)
- def post(self, request, format=None):
+ def post(self, request):
- data = {
- "name": request.data["name"],
- "category": request.data["category"],
- "description": request.data["description"],
- "shell": request.data["shell"],
- "default_timeout": request.data["default_timeout"],
- "script_type": "userdefined",  # force all uploads to be userdefined. built in scripts cannot be edited by user
- }
- # code editor upload
- if "args" in request.data.keys() and isinstance(request.data["args"], list):
- data["args"] = request.data["args"]
- # file upload, have to json load it cuz it's formData
- if "args" in request.data.keys() and "file_upload" in request.data.keys():
- data["args"] = json.loads(request.data["args"])
- if "favorite" in request.data.keys():
- data["favorite"] = request.data["favorite"]
- if "filename" in request.data.keys():
- message_bytes = request.data["filename"].read()
- data["code_base64"] = base64.b64encode(message_bytes).decode(
- "ascii", "ignore"
- )
- elif "code" in request.data.keys():
- message_bytes = request.data["code"].encode("ascii", "ignore")
- data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
- serializer = ScriptSerializer(data=data, partial=True)
+ serializer = ScriptSerializer(data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
@@ -85,11 +60,6 @@ class GetUpdateDeleteScript(APIView):
else:
return notify_error("Community scripts cannot be edited.")
- elif "code" in data:
- message_bytes = data["code"].encode("ascii")
- data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
- data.pop("code")
serializer = ScriptSerializer(data=data, instance=script, partial=True)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
@@ -107,11 +77,87 @@ class GetUpdateDeleteScript(APIView):
return Response(f"{script.name} was deleted!")
class GetAddScriptSnippets(APIView):
permission_classes = [IsAuthenticated, ManageScriptsPerms]
def get(self, request):
snippets = ScriptSnippet.objects.all()
return Response(ScriptSnippetSerializer(snippets, many=True).data)
def post(self, request):
serializer = ScriptSnippetSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("Script snippet was saved successfully")
class GetUpdateDeleteScriptSnippet(APIView):
permission_classes = [IsAuthenticated, ManageScriptsPerms]
def get(self, request, pk):
snippet = get_object_or_404(ScriptSnippet, pk=pk)
return Response(ScriptSnippetSerializer(snippet).data)
def put(self, request, pk):
snippet = get_object_or_404(ScriptSnippet, pk=pk)
serializer = ScriptSnippetSerializer(
instance=snippet, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("Script snippet was saved successfully")
def delete(self, request, pk):
snippet = get_object_or_404(ScriptSnippet, pk=pk)
snippet.delete()
return Response("Script snippet was deleted successfully")
class TestScript(APIView):
permission_classes = [IsAuthenticated, RunScriptPerms]
def post(self, request):
from .models import Script
from agents.models import Agent
agent = get_object_or_404(Agent, pk=request.data["agent"])
parsed_args = Script.parse_script_args(
agent, request.data["shell"], request.data["args"]
)
data = {
"func": "runscript",
"timeout": request.data["timeout"],
"script_args": parsed_args,
"payload": {
"code": Script.replace_with_snippets(request.data["code"]),
"shell": request.data["shell"],
},
}
r = asyncio.run(
agent.nats_cmd(data, timeout=request.data["timeout"], wait=True)
)
return Response(r)
@api_view()
@permission_classes([IsAuthenticated, ManageScriptsPerms])
def download(request, pk):
script = get_object_or_404(Script, pk=pk)
+ with_snippets = request.GET.get("with_snippets", True)
+ if with_snippets == "false":
+ with_snippets = False
if script.shell == "powershell":
filename = f"{script.name}.ps1"
elif script.shell == "cmd":
@@ -119,4 +165,9 @@ def download(request, pk):
else:
filename = f"{script.name}.py"
- return Response({"filename": filename, "code": script.code})
+ return Response(
+ {
+ "filename": filename,
+ "code": script.code if with_snippets else script.code_no_snippets,
+ }
+ )
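The new TestScript view runs ad-hoc code on an agent over NATS before the script is saved, and download now honors a with_snippets query parameter. A hedged sketch of the request shapes; client, script, and agent are illustrative objects, not part of this diff:

# Raw code without snippet expansion
client.get(f"/scripts/download/{script.pk}/?with_snippets=false")

# Run a one-off script on an agent (fields read by TestScript.post above)
client.post(
    "/scripts/testscript/",
    {
        "agent": agent.pk,
        "code": "Write-Output 'hello'",
        "shell": "powershell",
        "args": [],
        "timeout": 90,
    },
    format="json",
)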

View File

@@ -1,21 +1,16 @@
import asyncio
- from django.conf import settings
- from django.shortcuts import get_object_or_404
- from loguru import logger
- from rest_framework.decorators import api_view, permission_classes
- from rest_framework.permissions import IsAuthenticated
- from rest_framework.response import Response
from agents.models import Agent
from checks.models import Check
+ from django.shortcuts import get_object_or_404
+ from rest_framework.decorators import api_view, permission_classes
+ from rest_framework.permissions import IsAuthenticated
+ from rest_framework.response import Response
from tacticalrmm.utils import notify_error
from .permissions import ManageWinSvcsPerms
from .serializers import ServicesSerializer
- logger.configure(**settings.LOG_CONFIG)
@api_view()
def get_services(request, pk):

View File

@@ -14,7 +14,15 @@ class Command(BaseCommand):
agents = Agent.objects.all()
for agent in agents:
- sw = agent.installedsoftware_set.first().software
+ try:
+ sw = agent.installedsoftware_set.first().software
+ except:
+ self.stdout.write(
+ self.style.ERROR(
+ f"Agent {agent.hostname} missing software list. Try manually refreshing it from the web UI from the software tab."
+ )
+ )
+ continue
for i in sw:
if search in i["name"].lower():
self.stdout.write(

View File

@@ -5,6 +5,6 @@ from . import views
urlpatterns = [
path("chocos/", views.chocos),
path("install/", views.install),
- path("installed/<pk>/", views.get_installed),
- path("refresh/<pk>/", views.refresh_installed),
+ path("installed/<int:pk>/", views.get_installed),
+ path("refresh/<int:pk>/", views.refresh_installed),
]

View File

@@ -0,0 +1,64 @@
from django.utils import timezone as djangotime
from django.utils.translation import ugettext_lazy as _
from rest_framework import exceptions
from rest_framework.authentication import BaseAuthentication, HTTP_HEADER_ENCODING
from accounts.models import APIKey
def get_authorization_header(request):
"""
Return request's 'Authorization:' header, as a bytestring.
Hide some test client ickyness where the header can be unicode.
"""
auth = request.META.get("HTTP_X_API_KEY", b"")
if isinstance(auth, str):
# Work around django test client oddness
auth = auth.encode(HTTP_HEADER_ENCODING)
return auth
class APIAuthentication(BaseAuthentication):
"""
Simple token based authentication for stateless api access.
Clients should authenticate by passing the token key in the "X-API-KEY"
HTTP header. For example:
X-API-KEY: ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789
"""
def get_model(self):
return APIKey
def authenticate(self, request):
auth = get_authorization_header(request)
if not auth:
return None
try:
apikey = auth.decode()
except UnicodeError:
msg = _(
"Invalid token header. Token string should not contain invalid characters."
)
raise exceptions.AuthenticationFailed(msg)
return self.authenticate_credentials(apikey)
def authenticate_credentials(self, key):
try:
apikey = APIKey.objects.select_related("user").get(key=key)
except APIKey.DoesNotExist:
raise exceptions.AuthenticationFailed(_("Invalid token."))
if not apikey.user.is_active:
raise exceptions.AuthenticationFailed(_("User inactive or deleted."))
# check if token is expired
if apikey.expiration and apikey.expiration < djangotime.now():
raise exceptions.AuthenticationFailed(_("The token as expired."))
return (apikey.user, apikey.key)
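Per the docstring above, stateless clients authenticate by sending the key in the X-API-KEY header; Knox tokens keep working for the dashboard. A minimal client-side sketch with a made-up host and key:

import requests

API = "https://api.example.com"  # hypothetical instance URL
HEADERS = {"X-API-KEY": "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"}  # key created in the web UI

r = requests.get(f"{API}/scripts/", headers=HEADERS, timeout=30)
r.raise_for_status()
print(r.json())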

View File

@@ -35,9 +35,13 @@ app.conf.beat_schedule = {
"task": "agents.tasks.auto_self_agent_update_task",
"schedule": crontab(minute=35, hour="*"),
},
- "monitor-agents": {
- "task": "agents.tasks.monitor_agents_task",
- "schedule": crontab(minute="*/7"),
+ "handle-agents": {
+ "task": "agents.tasks.handle_agents_task",
+ "schedule": crontab(minute="*"),
+ },
+ "get-agentinfo": {
+ "task": "agents.tasks.agent_getinfo_task",
+ "schedule": crontab(minute="*"),
},
"get-wmi": {
"task": "agents.tasks.get_wmi_task",

View File

@@ -1,43 +0,0 @@
SECRET_KEY = 'changeme'
ALLOWED_HOSTS = ['api.example.com']
ADMIN_URL = "somerandomstring/"
CORS_ORIGIN_WHITELIST = ["https://rmm.example.com",]
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'tacticalrmm',
'USER': 'tacticalrmm',
'PASSWORD': 'changeme',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
REST_FRAMEWORK = {
'DATETIME_FORMAT': "%b-%d-%Y - %H:%M",
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
MESH_USERNAME = "changeme"
MESH_SITE = "https://mesh.example.com"
MESH_TOKEN_KEY = "changeme"
REDIS_HOST = "localhost"

View File

@@ -2,6 +2,7 @@ import threading
from django.conf import settings
from rest_framework.exceptions import AuthenticationFailed
+ from ipware import get_client_ip
request_local = threading.local()
@@ -67,6 +68,7 @@ class AuditMiddleware:
debug_info["view_func"] = view_func.__name__
debug_info["view_args"] = view_args
debug_info["view_kwargs"] = view_kwargs
+ debug_info["ip"] = request._client_ip
request_local.debug_info = debug_info
@@ -83,3 +85,15 @@ class AuditMiddleware:
request_local.debug_info = None
request_local.username = None
return response
class LogIPMiddleware:
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
client_ip, is_routable = get_client_ip(request)
request._client_ip = client_ip
response = self.get_response(request)
return response
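LogIPMiddleware simply stashes the client IP on the request object; anything later in the stack can read it (the audit middleware above stores it in debug_info["ip"]). A tiny hedged sketch of a downstream consumer (hypothetical view, not part of this diff):

from django.http import JsonResponse

def whoami(request):
    # _client_ip is set by LogIPMiddleware earlier in the middleware stack
    return JsonResponse({"ip": getattr(request, "_client_ip", None)})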

View File

@@ -1,3 +1,7 @@
from rest_framework import permissions
from tacticalrmm.auth import APIAuthentication
def _has_perm(request, perm):
if request.user.is_superuser or (
request.user.role and getattr(request.user.role, "is_superuser")

View File

@@ -15,23 +15,25 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
AUTH_USER_MODEL = "accounts.User"
# latest release
- TRMM_VERSION = "0.7.2"
+ TRMM_VERSION = "0.8.4"
# bump this version everytime vue code is changed
# to alert user they need to manually refresh their browser
- APP_VER = "0.0.141"
+ APP_VER = "0.0.146"
# https://github.com/wh1te909/rmmagent
- LATEST_AGENT_VER = "1.5.9"
+ LATEST_AGENT_VER = "1.6.2"
- MESH_VER = "0.8.60"
+ MESH_VER = "0.9.16"
+ NATS_SERVER_VER = "2.3.3"
# for the update script, bump when need to recreate venv or npm install
- PIP_VER = "19"
+ PIP_VER = "21"
- NPM_VER = "19"
+ NPM_VER = "22"
- SETUPTOOLS_VER = "57.0.0"
+ SETUPTOOLS_VER = "57.5.0"
- WHEEL_VER = "0.36.2"
+ WHEEL_VER = "0.37.0"
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
@@ -56,6 +58,21 @@ try:
except ImportError:
pass
REST_FRAMEWORK = {
"DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
"DEFAULT_AUTHENTICATION_CLASSES": (
"knox.auth.TokenAuthentication",
"tacticalrmm.auth.APIAuthentication",
),
}
if not "AZPIPELINE" in os.environ:
if not DEBUG: # type: ignore
REST_FRAMEWORK.update(
{"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",)}
)
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
@@ -109,6 +126,7 @@ MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"corsheaders.middleware.CorsMiddleware", ##
+ "tacticalrmm.middleware.LogIPMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
@@ -173,12 +191,23 @@ STATIC_URL = "/static/"
STATIC_ROOT = os.path.join(BASE_DIR, "static")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "tacticalrmm/static/")]
- LOG_CONFIG = {
- "handlers": [{"sink": os.path.join(LOG_DIR, "debug.log"), "serialize": False}]
+ LOGGING = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "handlers": {
+ "file": {
+ "level": "ERROR",
+ "class": "logging.FileHandler",
+ "filename": os.path.join(LOG_DIR, "django_debug.log"),
+ }
+ },
+ "loggers": {
+ "django.request": {"handlers": ["file"], "level": "ERROR", "propagate": True}
+ },
}
if "AZPIPELINE" in os.environ:
- print("PIPELINE")
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
@@ -193,7 +222,10 @@ if "AZPIPELINE" in os.environ:
REST_FRAMEWORK = {
"DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
- "DEFAULT_AUTHENTICATION_CLASSES": ("knox.auth.TokenAuthentication",),
+ "DEFAULT_AUTHENTICATION_CLASSES": (
+ "knox.auth.TokenAuthentication",
+ "tacticalrmm.auth.APIAuthentication",
+ ),
"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
}

View File

@@ -4,7 +4,8 @@ from unittest.mock import mock_open, patch
import requests
from django.conf import settings
- from django.test import TestCase, override_settings
+ from django.test import override_settings
+ from tacticalrmm.test import TacticalTestCase
from .utils import (
bitdays_to_string,
@@ -16,7 +17,10 @@ from .utils import (
)
- class TestUtils(TestCase):
+ class TestUtils(TacticalTestCase):
+ def setUp(self):
+ self.setup_coresettings()
@patch("requests.post")
@patch("__main__.__builtins__.open", new_callable=mock_open)
def test_generate_winagent_exe_success(self, m_open, mock_post):
@@ -77,7 +81,7 @@ class TestUtils(TestCase):
@patch("subprocess.run")
def test_run_nats_api_cmd(self, mock_subprocess):
ids = ["a", "b", "c"]
- _ = run_nats_api_cmd("monitor", ids)
+ _ = run_nats_api_cmd("wmi", ids)
mock_subprocess.assert_called_once()
def test_bitdays_to_string(self):

View File

@@ -15,14 +15,12 @@ from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.http import FileResponse
from knox.auth import TokenAuthentication
- from loguru import logger
from rest_framework import status
from rest_framework.response import Response
- from agents.models import Agent
from core.models import CodeSignToken
+ from logs.models import DebugLog
- logger.configure(**settings.LOG_CONFIG)
+ from agents.models import Agent
notify_error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)
@@ -61,7 +59,7 @@
)
try:
- codetoken = CodeSignToken.objects.first().token
+ codetoken = CodeSignToken.objects.first().token  # type:ignore
base_url = get_exegen_url() + "/api/v1/winagents/?"
params = {
"version": settings.LATEST_AGENT_VER,
@@ -107,7 +105,7 @@ def generate_winagent_exe(
break
if errors:
- logger.error(errors)
+ DebugLog.error(message=errors)
return notify_error(
"Something went wrong. Check debug error log for exact error message"
)
@@ -123,7 +121,7 @@
def get_default_timezone():
from core.models import CoreSettings
- return pytz.timezone(CoreSettings.objects.first().default_time_zone)
+ return pytz.timezone(CoreSettings.objects.first().default_time_zone)  # type:ignore
def get_bit_days(days: list[str]) -> int:
@@ -178,28 +176,28 @@ def filter_software(sw: SoftwareList) -> SoftwareList:
def reload_nats():
users = [{"user": "tacticalrmm", "password": settings.SECRET_KEY}]
- agents = Agent.objects.prefetch_related("user").only("pk", "agent_id")
+ agents = Agent.objects.prefetch_related("user").only(
+ "pk", "agent_id"
+ )  # type:ignore
for agent in agents:
try:
users.append(
{"user": agent.agent_id, "password": agent.user.auth_token.key}
)
except:
- logger.critical(
- f"{agent.hostname} does not have a user account, NATS will not work"
+ DebugLog.critical(
+ agent=agent,
+ log_type="agent_issues",
+ message=f"{agent.hostname} does not have a user account, NATS will not work",
)
domain = settings.ALLOWED_HOSTS[0].split(".", 1)[1]
- cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
- key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
if hasattr(settings, "CERT_FILE") and hasattr(settings, "KEY_FILE"):
if os.path.exists(settings.CERT_FILE) and os.path.exists(settings.KEY_FILE):
cert_file = settings.CERT_FILE
key_file = settings.KEY_FILE
+ else:
+ cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
+ key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
+ else:
+ cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
+ key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
config = {
"tls": {
@@ -207,7 +205,7 @@ def reload_nats():
"key_file": key_file,
},
"authorization": {"users": users},
- "max_payload": 2048576005,
+ "max_payload": 67108864,
}
conf = os.path.join(settings.BASE_DIR, "nats-rmm.conf")
@@ -248,21 +246,36 @@ KnoxAuthMiddlewareStack = lambda inner: KnoxAuthMiddlewareInstance(
)
- def run_nats_api_cmd(mode: str, ids: list[str], timeout: int = 30) -> None:
+ def run_nats_api_cmd(mode: str, ids: list[str] = [], timeout: int = 30) -> None:
- config = {
- "key": settings.SECRET_KEY,
- "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
- "agents": ids,
- }
- with tempfile.NamedTemporaryFile() as fp:
+ if mode == "wmi":
+ config = {
+ "key": settings.SECRET_KEY,
+ "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
+ "agents": ids,
+ }
+ else:
+ db = settings.DATABASES["default"]
+ config = {
+ "key": settings.SECRET_KEY,
+ "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
+ "user": db["USER"],
+ "pass": db["PASSWORD"],
+ "host": db["HOST"],
+ "port": int(db["PORT"]),
+ "dbname": db["NAME"],
+ }
+ with tempfile.NamedTemporaryFile(
+ dir="/opt/tactical/tmp" if settings.DOCKER_BUILD else None
+ ) as fp:
with open(fp.name, "w") as f:
json.dump(config, f)
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
try:
- subprocess.run(cmd, capture_output=True, timeout=timeout)
+ subprocess.run(cmd, timeout=timeout)
except Exception as e:
- logger.error(e)
+ DebugLog.error(message=e)
def get_latest_trmm_ver() -> str:
@@ -277,15 +290,16 @@ def get_latest_trmm_ver() -> str:
if "TRMM_VERSION" in line:
return line.split(" ")[2].strip('"')
except Exception as e:
- logger.error(e)
+ DebugLog.error(message=e)
return "error"
def replace_db_values(
- string: str, agent: Agent = None, shell: str = None, quotes=True
+ string: str, instance=None, shell: str = None, quotes=True  # type:ignore
) -> Union[str, None]:
from core.models import CustomField, GlobalKVStore
+ from clients.models import Client, Site
# split by period if exists. First should be model and second should be property i.e {{client.name}}
temp = string.split(".")
@@ -293,7 +307,7 @@ def replace_db_values(
# check for model and property
if len(temp) < 2:
# ignore arg since it is invalid
- return None
+ return ""
# value is in the global keystore and replace value
if temp[0] == "global":
@@ -302,30 +316,48 @@
return f"'{value}'" if quotes else value
else:
- logger.error(
- f"Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store"
+ DebugLog.error(
+ log_type="scripting",
+ message=f"{agent.hostname} Couldn't lookup value for: {string}. Make sure it exists in CoreSettings > Key Store",  # type:ignore
)
- return None
+ return ""
- if not agent:
- # agent must be set if not global property
- return f"There was an error finding the agent: {agent}"
+ if not instance:
+ # instance must be set if not global property
+ return ""
if temp[0] == "client":
model = "client"
- obj = agent.client
+ if isinstance(instance, Client):
+ obj = instance
+ elif hasattr(instance, "client"):
+ obj = instance.client
+ else:
+ obj = None
elif temp[0] == "site":
model = "site"
- obj = agent.site
+ if isinstance(instance, Site):
+ obj = instance
+ elif hasattr(instance, "site"):
+ obj = instance.site
+ else:
+ obj = None
elif temp[0] == "agent":
model = "agent"
- obj = agent
+ if isinstance(instance, Agent):
+ obj = instance
+ else:
+ obj = None
else:
# ignore arg since it is invalid
- logger.error(
- f"Not enough information to find value for: {string}. Only agent, site, client, and global are supported."
+ DebugLog.error(
+ log_type="scripting",
+ message=f"{instance} Not enough information to find value for: {string}. Only agent, site, client, and global are supported.",
)
- return None
+ return ""
+ if not obj:
+ return ""
if hasattr(obj, temp[1]):
value = f"'{getattr(obj, temp[1])}'" if quotes else getattr(obj, temp[1])
@@ -359,19 +391,21 @@ def replace_db_values(
else:
# ignore arg since property is invalid
- logger.error(
- f"Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property"
+ DebugLog.error(
+ log_type="scripting",
+ message=f"{instance} Couldn't find property on supplied variable: {string}. Make sure it exists as a custom field or a valid agent property",
)
- return None
+ return ""
# log any unhashable type errors
if value != None:
return value  # type: ignore
else:
- logger.error(
- f"Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property"
+ DebugLog.error(
+ log_type="scripting",
+ message=f" {instance}({instance.pk}) Couldn't lookup value for: {string}. Make sure it exists as a custom field or a valid agent property",
)
- return None
+ return ""
def format_shell_array(value: list) -> str:
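replace_db_values now takes a generic instance (Agent, Site, or Client) instead of only an agent, and returns an empty string on failure rather than None. A hedged call sketch; agent and site are assumed to be existing model instances, and "api_key" is a made-up keystore entry:

from tacticalrmm.utils import replace_db_values

replace_db_values("client.name", instance=agent, quotes=False)   # resolved via agent.client
replace_db_values("site.name", instance=site, quotes=False)      # direct Site instance
replace_db_values("global.api_key", quotes=False)                 # keystore lookup, no instance needed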

View File

@@ -3,15 +3,12 @@ import datetime as dt
import time
import pytz
- from django.conf import settings
from django.utils import timezone as djangotime
- from loguru import logger
from packaging import version as pyver
from agents.models import Agent
from tacticalrmm.celery import app
+ from logs.models import DebugLog
- logger.configure(**settings.LOG_CONFIG)
@app.task
@@ -120,7 +117,11 @@ def check_agent_update_schedule_task():
if install:
# initiate update on agent asynchronously and don't worry about ret code
- logger.info(f"Installing windows updates on {agent.salt_id}")
+ DebugLog.info(
+ agent=agent,
+ log_type="windows_updates",
+ message=f"Installing windows updates on {agent.hostname}",
+ )
nats_data = {
"func": "installwinupdates",
"guids": agent.get_approved_update_guids(),

View File

@@ -8,7 +8,7 @@ jobs:
strategy:
matrix:
Debian10:
- AGENT_NAME: "azpipelines-deb10"
+ AGENT_NAME: "az-pipeline-fran"
pool:
name: linux-vms
@@ -20,6 +20,7 @@ jobs:
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS pipeline'
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test_pipeline'
sudo -u postgres psql -c 'CREATE DATABASE pipeline'
+ sudo -u postgres psql -c "SET client_encoding = 'UTF8'" pipeline
SETTINGS_FILE="/myagent/_work/1/s/api/tacticalrmm/tacticalrmm/settings.py"
rm -rf /myagent/_work/1/s/api/env
cd /myagent/_work/1/s/api

View File

@@ -1,6 +1,6 @@
#!/bin/bash
- SCRIPT_VERSION="14"
+ SCRIPT_VERSION="15"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'
GREEN='\033[0;32m'
@@ -80,7 +80,7 @@ if [ -f "${sysd}/daphne.service" ]; then
sudo cp ${sysd}/daphne.service ${tmp_dir}/systemd/
fi
- cat /rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
+ cat /rmm/api/tacticalrmm/tacticalrmm/private/log/django_debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
cp /rmm/api/tacticalrmm/tacticalrmm/local_settings.py /rmm/api/tacticalrmm/app.ini ${tmp_dir}/rmm/
cp /rmm/web/.env ${tmp_dir}/rmm/env
cp /rmm/api/tacticalrmm/tacticalrmm/private/exe/mesh*.exe ${tmp_dir}/rmm/

View File

@@ -15,6 +15,7 @@ MESH_USER=tactical
MESH_PASS=tactical
MONGODB_USER=mongouser
MONGODB_PASSWORD=mongopass
+ MESH_PERSISTENT_CONFIG=0
# database settings
POSTGRES_USER=postgres

View File

@@ -9,14 +9,19 @@ set -e
: "${MONGODB_HOST:=tactical-mongodb}"
: "${MONGODB_PORT:=27017}"
: "${NGINX_HOST_IP:=172.20.0.20}"
+ : "${MESH_PERSISTENT_CONFIG:=0}"
mkdir -p /home/node/app/meshcentral-data
mkdir -p ${TACTICAL_DIR}/tmp
+ if [ ! -f "/home/node/app/meshcentral-data/config.json" ] || [[ "${MESH_PERSISTENT_CONFIG}" -eq 0 ]]; then
+ encoded_uri=$(node -p "encodeURI('mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}')")
mesh_config="$(cat << EOF
{
"settings": {
- "mongodb": "mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}",
+ "mongodb": "${encoded_uri}",
"Cert": "${MESH_HOST}",
"TLSOffload": "${NGINX_HOST_IP}",
"RedirPort": 80,
@@ -54,11 +59,19 @@ EOF
echo "${mesh_config}" > /home/node/app/meshcentral-data/config.json
+ fi
node node_modules/meshcentral --createaccount ${MESH_USER} --pass ${MESH_PASS} --email example@example.com
node node_modules/meshcentral --adminaccount ${MESH_USER}
if [ ! -f "${TACTICAL_DIR}/tmp/mesh_token" ]; then
- node node_modules/meshcentral --logintokenkey > ${TACTICAL_DIR}/tmp/mesh_token
+ mesh_token=$(node node_modules/meshcentral --logintokenkey)
+ if [[ ${#mesh_token} -eq 160 ]]; then
+ echo ${mesh_token} > /opt/tactical/tmp/mesh_token
+ else
+ echo "Failed to generate mesh token. Fix the error and restart the mesh container"
+ fi
fi
# wait for nginx container

View File

@@ -1,4 +1,4 @@
- FROM nats:2.2.6-alpine
+ FROM nats:2.3.3-alpine
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready

View File

@@ -5,6 +5,7 @@ set -e
: "${WORKER_CONNECTIONS:=2048}"
: "${APP_PORT:=80}"
: "${API_PORT:=80}"
+ : "${DEV:=0}"
CERT_PRIV_PATH=${TACTICAL_DIR}/certs/privkey.pem
CERT_PUB_PATH=${TACTICAL_DIR}/certs/fullchain.pem
@@ -28,6 +29,34 @@ fi
/bin/bash -c "sed -i 's/worker_connections.*/worker_connections ${WORKER_CONNECTIONS};/g' /etc/nginx/nginx.conf"
if [ $DEV -eq 1 ]; then
API_NGINX="
#Using variable to disable start checks
set \$api http://tactical-backend:${API_PORT};
proxy_pass \$api;
proxy_http_version 1.1;
proxy_cache_bypass \$http_upgrade;
proxy_set_header Upgrade \$http_upgrade;
proxy_set_header Connection \"upgrade\";
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto \$scheme;
proxy_set_header X-Forwarded-Host \$host;
proxy_set_header X-Forwarded-Port \$server_port;
"
else
API_NGINX="
#Using variable to disable start checks
set \$api tactical-backend:${API_PORT};
include uwsgi_params;
uwsgi_pass \$api;
"
fi
nginx_config="$(cat << EOF
# backend config
server {
@@ -36,21 +65,7 @@ server {
server_name ${API_HOST};
location / {
- #Using variable to disable start checks
- set \$api http://tactical-backend:${API_PORT};
- proxy_pass \$api;
- proxy_http_version 1.1;
- proxy_cache_bypass \$http_upgrade;
- proxy_set_header Upgrade \$http_upgrade;
- proxy_set_header Connection "upgrade";
- proxy_set_header Host \$host;
- proxy_set_header X-Real-IP \$remote_addr;
- proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto \$scheme;
- proxy_set_header X-Forwarded-Host \$host;
- proxy_set_header X-Forwarded-Port \$server_port;
+ ${API_NGINX}
}
location /static/ {

View File

@@ -1,5 +1,5 @@
# creates python virtual env
- FROM python:3.9.2-slim AS CREATE_VENV_STAGE
+ FROM python:3.9.6-slim AS CREATE_VENV_STAGE
ARG DEBIAN_FRONTEND=noninteractive
@@ -18,13 +18,12 @@ RUN apt-get update && \
apt-get install -y --no-install-recommends gcc libc6-dev && \
rm -rf /var/lib/apt/lists/* && \
pip install --upgrade pip && \
- pip install --no-cache-dir setuptools wheel gunicorn && \
- sed -i '/uWSGI/d' ${TACTICAL_TMP_DIR}/api/requirements.txt && \
+ pip install --no-cache-dir setuptools wheel && \
pip install --no-cache-dir -r ${TACTICAL_TMP_DIR}/api/requirements.txt
# runtime image
- FROM python:3.9.2-slim
+ FROM python:3.9.6-slim
# set env variables
ENV VIRTUAL_ENV /opt/venv

View File

@@ -36,7 +36,8 @@ if [ "$1" = 'tactical-init' ]; then
mkdir -p ${TACTICAL_DIR}/tmp mkdir -p ${TACTICAL_DIR}/tmp
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/exe
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/private/log
touch ${TACTICAL_DIR}/api/tacticalrmm/private/log/django_debug.log
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
echo "waiting for postgresql container to be ready..." echo "waiting for postgresql container to be ready..."
@@ -87,24 +88,6 @@ DATABASES = {
} }
} }
REST_FRAMEWORK = {
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
@@ -116,6 +99,28 @@ EOF
echo "${localvars}" > ${TACTICAL_DIR}/api/tacticalrmm/local_settings.py
uwsgiconf="$(cat << EOF
[uwsgi]
chdir = /opt/tactical/api
module = tacticalrmm.wsgi
home = /opt/venv
master = true
processes = 8
threads = 2
enable-threads = true
socket = 0.0.0.0:80
chmod-socket = 660
buffer-size = 65535
vacuum = true
die-on-term = true
max-requests = 2000
EOF
)"
echo "${uwsgiconf}" > ${TACTICAL_DIR}/api/uwsgi.ini
# run migrations and init scripts
python manage.py migrate --no-input
python manage.py collectstatic --no-input
@@ -141,22 +146,7 @@ fi
if [ "$1" = 'tactical-backend' ]; then
check_tactical_ready
- # Prepare log files and start outputting logs to stdout
- mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs
- touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log
- touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log
- tail -n 0 -f ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn*.log &
- export DJANGO_SETTINGS_MODULE=tacticalrmm.settings
- exec gunicorn tacticalrmm.wsgi:application \
- --name tactical-backend \
- --bind 0.0.0.0:80 \
- --workers 5 \
- --log-level=info \
- --log-file=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log \
- --access-logfile=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log \
+ uwsgi ${TACTICAL_DIR}/api/uwsgi.ini
fi
if [ "$1" = 'tactical-celery' ]; then
@@ -170,7 +160,7 @@ if [ "$1" = 'tactical-celerybeat' ]; then
celery -A tacticalrmm beat -l info
fi
- # backend container
+ # websocket container
if [ "$1" = 'tactical-websockets' ]; then
check_tactical_ready

View File

@@ -97,6 +97,7 @@ services:
MESH_PASS: ${MESH_PASS}
MONGODB_USER: ${MONGODB_USER}
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
+ MESH_PERSISTENT_CONFIG: ${MESH_PERSISTENT_CONFIG}
networks:
proxy:
aliases:

Some files were not shown because too many files have changed in this diff.