Compare commits

..

156 Commits

Author SHA1 Message Date
wh1te909
9a5f01813b Release 0.2.1 2020-11-26 06:20:49 +00:00
wh1te909
0605a3b725 fix uninstall for older agents 2020-11-26 06:20:01 +00:00
wh1te909
09c535f159 Release 0.2.0 2020-11-26 03:43:40 +00:00
wh1te909
7fb11da5df update scripts and bump version 2020-11-26 03:42:27 +00:00
wh1te909
9c9a46499a allow changing of refresh interval for task manager 2020-11-26 01:54:20 +00:00
wh1te909
6fca60261e fix recovery 2020-11-26 01:04:42 +00:00
wh1te909
00537b32ef hide output 2020-11-26 00:38:13 +00:00
wh1te909
8636758a90 fix tests 2020-11-26 00:02:11 +00:00
wh1te909
e39dfbd624 Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2020-11-25 23:48:43 +00:00
wh1te909
6e048b2a12 agent recovery improvements 2020-11-25 23:48:14 +00:00
wh1te909
f9657599c2 update agents UI fixes 2020-11-25 23:45:02 +00:00
wh1te909
42ae3bba9b increase timeout for software list 2020-11-25 23:41:49 +00:00
Josh
2fd56a4bfe lock mongodb and redis service containers to specific versions 2020-11-25 23:09:17 +00:00
wh1te909
824bcc5603 black 2020-11-25 22:19:27 +00:00
wh1te909
4fbb613aaa change bg tasks schedule 2020-11-25 21:18:03 +00:00
sadnub
9eb45270f2 Update docker readme 2020-11-25 14:53:38 -05:00
Tragic Bronson
75c61c53e8 Merge pull request #190 from sadnub/develop
Docker Setup
2020-11-24 20:46:05 -08:00
sadnub
2688a47436 fix settings 2020-11-24 23:32:26 -05:00
sadnub
fe3bf4b189 get nats container working 2020-11-24 23:25:34 -05:00
Josh Krawczyk
456cb5ebb2 mesh data fix 2020-11-24 23:25:34 -05:00
Josh Krawczyk
3d91d574b4 docker changes 2020-11-24 23:25:34 -05:00
sadnub
54876c5499 fixes to domain in cert generation 2020-11-24 23:25:34 -05:00
sadnub
d256585284 docker changes 2020-11-24 23:25:34 -05:00
sadnub
bd8f100b43 move tactical docker image to alpine 2020-11-24 23:25:34 -05:00
sadnub
44f05f2dcc nats docker setup 2020-11-24 23:25:34 -05:00
sadnub
43f7f82bdc docker fixes 2020-11-24 23:25:34 -05:00
wh1te909
e902f63211 fix response 2020-11-25 03:40:39 +00:00
wh1te909
129f68e194 remove task that's no longer applicable due to recent db changes 2020-11-25 02:47:44 +00:00
wh1te909
4b37fe12d7 remove task that's no longer applicable due to recent db changes 2020-11-25 02:46:50 +00:00
wh1te909
6de79922c5 fix git failing to switch branches during update 2020-11-25 01:18:26 +00:00
wh1te909
e1a9791f44 move run task to nats 2020-11-25 00:17:12 +00:00
wh1te909
81795f51c6 more cleanup 2020-11-24 21:13:54 +00:00
wh1te909
68dfb11155 style fix 2020-11-24 21:09:15 +00:00
wh1te909
39fc1beb89 one more nats 2020-11-24 10:08:20 +00:00
wh1te909
fe0ddec0f9 move runchecks to nats 2020-11-24 05:49:42 +00:00
wh1te909
9b52b4efd9 move wmi to nats 2020-11-24 05:14:45 +00:00
wh1te909
e90e527603 move bulk cmd/script to nats 2020-11-24 04:09:52 +00:00
wh1te909
a510854741 fix core settings for install script 2020-11-23 07:58:22 +00:00
wh1te909
8935ce4ccf move installed software to nats wh1te909/rmmagent@b5b5297350 2020-11-23 06:59:26 +00:00
wh1te909
f9edc9059a format 2020-11-23 06:15:26 +00:00
wh1te909
db8917a769 move reboot to nats 2020-11-23 05:09:06 +00:00
wh1te909
c2d70cc1c2 more nats, fix tests 2020-11-23 03:58:37 +00:00
wh1te909
3b13c7f9ce move agent uninstall to nats wh1te909/rmmagent@502cc0d3de 2020-11-23 02:19:54 +00:00
wh1te909
b7150d8026 don't update last seen in checkrunner 2020-11-23 01:25:33 +00:00
wh1te909
041830a7f8 bump quasar 2020-11-23 01:15:53 +00:00
wh1te909
a18daf0195 fix tests 2020-11-23 00:47:16 +00:00
wh1te909
5d3dfceb22 remove extra logger 2020-11-23 00:46:48 +00:00
wh1te909
c82855e732 remove travis 2020-11-22 23:37:01 +00:00
wh1te909
956f156018 notify if agent not supported 2020-11-22 23:31:59 +00:00
wh1te909
9b13c35e7f nats winsvc check 2020-11-22 23:06:52 +00:00
wh1te909
bc8e637bba add port 4222 to instructions 2020-11-22 22:50:59 +00:00
wh1te909
f03c28c906 mesh nats 2020-11-22 11:29:47 +00:00
wh1te909
e4b1f39fdc move run script to nats 2020-11-22 10:32:21 +00:00
wh1te909
4780af910c add nats recovery 2020-11-22 04:09:23 +00:00
wh1te909
d61ce5c524 move edit winsvc to nats wh1te909/rmmagent@88085847a5 2020-11-22 00:39:56 +00:00
wh1te909
20ab151f4d start moving win svcs to nats wh1te909/rmmagent@d2c9ec7f6d 2020-11-21 23:14:24 +00:00
wh1te909
8a7be7543a Merge branch 'nats' into develop 2020-11-21 04:00:21 +00:00
wh1te909
3f806aec9c fix scripts 2020-11-21 03:37:39 +00:00
wh1te909
6c273b32bb switch axios url 2020-11-21 03:21:16 +00:00
wh1te909
b986f9d6ee add missing escape 2020-11-21 03:07:59 +00:00
wh1te909
c98cca6b7b Merge branch 'develop' of https://github.com/wh1te909/tacticalrmm into develop 2020-11-21 02:47:41 +00:00
wh1te909
fbec78ede5 cut down on nginx logging 2020-11-21 02:47:23 +00:00
sadnub
c1d9a2d1f1 certificate fixes and mesh setup improvements 2020-11-20 16:42:12 -05:00
sadnub
8a10036f32 fix tests 2020-11-20 10:30:07 -05:00
sadnub
924a3aec0e Update readme.md 2020-11-20 10:21:45 -05:00
sadnub
3b3ac31541 fix certificates 2020-11-20 09:46:05 -05:00
wh1te909
e0cb2f9d0f add new agent update method wh1te909/rmmagent@9ede622837 2020-11-20 09:23:38 +00:00
wh1te909
549b4edb59 self update the update script 2020-11-20 08:09:58 +00:00
Tragic Bronson
67c912aca2 Merge pull request #186 from sadnub/develop
New Docker Setup
2020-11-19 22:59:03 -08:00
sadnub
a74dde5d9e attempt tests fix 2020-11-20 00:04:28 -05:00
sadnub
f7bcd24726 working docker setup with Dockerhub 2020-11-19 23:34:50 -05:00
sadnub
337c900770 fix merge conflicts 2020-11-19 19:18:33 -05:00
Josh Krawczyk
e83e73ead4 finished up docker 2020-11-19 19:03:44 -05:00
wh1te909
24f6f9b063 Release 0.1.8 2020-11-19 07:19:05 +00:00
wh1te909
5dc999360e update deps 2020-11-19 07:18:48 +00:00
Josh Krawczyk
9ec2f6b64d more docker changes 2020-11-18 22:42:45 -05:00
wh1te909
f970592efe eventlog 2020-11-18 07:35:57 +00:00
sadnub
7592c11e99 more docker changes 2020-11-17 23:39:03 -05:00
sadnub
759b05e137 more docker changes 2020-11-17 22:56:16 -05:00
wh1te909
42ebd9ffce procs, cmd and eventlog nats 2020-11-17 08:25:56 +00:00
Josh Krawczyk
bc0fc33966 more docker additions 2020-11-16 21:22:28 -05:00
Josh Krawczyk
f4aab16e39 more docker changes 2020-11-16 14:28:10 -05:00
wh1te909
e91425287c start nats 2020-11-15 07:40:26 +00:00
sadnub
f05908f570 docker rework start 2020-11-14 16:55:05 -05:00
wh1te909
8b351edf9c start docs 2020-11-14 07:51:22 +00:00
wh1te909
93c06eaba0 Release 0.1.7 2020-11-13 09:19:16 +00:00
wh1te909
a8d9fa75d4 fix edit site bug v1.0.7 2020-11-13 09:18:55 +00:00
wh1te909
159ecd3e4f Release 0.1.6 2020-11-12 20:24:46 +00:00
wh1te909
717803c665 version 0.1.6 2020-11-12 20:23:50 +00:00
wh1te909
0d40589e8a agent 1.0.2 2020-11-12 20:18:35 +00:00
wh1te909
8c5544bfad finish dark mode 2020-11-12 20:16:50 +00:00
Tragic Bronson
0c9be9f84f Merge pull request #178 from sadnub/develop
docker api build fix
2020-11-12 11:48:55 -08:00
sadnub
497729ecd6 docker api build fix 2020-11-12 14:02:56 -05:00
wh1te909
21a8efa3b8 more dark mode 2020-11-12 09:36:53 +00:00
wh1te909
c2f942a51e change check desc 2020-11-12 09:21:49 +00:00
wh1te909
63b4b95240 more dark mode tweaks 2020-11-12 07:02:27 +00:00
wh1te909
955f37e005 store dark mode setting in db 2020-11-12 05:23:53 +00:00
Tragic Bronson
cd2ae89b0e Merge pull request #176 from sadnub/develop
audit manager improvements
2020-11-11 16:03:49 -08:00
sadnub
0b013fa438 fix dark mode toggle switch 2020-11-11 17:51:34 -05:00
sadnub
478b657354 dark mode start 2020-11-11 17:43:29 -05:00
sadnub
65b6aabe69 some more fixes and formatting changes 2020-11-11 13:01:11 -05:00
sadnub
3fabae5b5f add some tests 2020-11-11 11:21:02 -05:00
sadnub
96c46a9e12 bump app ver 2020-11-11 11:03:27 -05:00
sadnub
381b93e8eb finished up audit manager improvements 2020-11-11 11:03:06 -05:00
sadnub
f51e5b6fbf audit manager rework 2020-11-11 09:55:03 -05:00
wh1te909
20befd1ca2 Release 0.1.5 2020-11-11 05:32:00 +00:00
wh1te909
ac6c6130f8 version 0.1.5 2020-11-11 05:31:32 +00:00
wh1te909
d776a2325c fix filter not resetting text length on clear 2020-11-11 05:30:24 +00:00
wh1te909
4aec4257da Release 0.1.4 2020-11-11 05:05:37 +00:00
wh1te909
d654f856d1 version 0.1.4 2020-11-11 05:05:10 +00:00
wh1te909
8d3b0a2069 optimize query 2020-11-11 02:47:09 +00:00
wh1te909
54a96f35e8 fix slow query 2020-11-10 20:48:51 +00:00
wh1te909
2dc56d72f6 show agent info in take control title #163 2020-11-10 10:11:17 +00:00
wh1te909
4b6ddb535a fix filter 2020-11-10 09:51:37 +00:00
wh1te909
697e2250d4 add back sort 2020-11-10 09:50:16 +00:00
wh1te909
6a75035b04 fix last run column 2020-11-10 08:06:40 +00:00
wh1te909
46b166bc41 fix resetpatchpolicy 2020-11-10 08:05:38 +00:00
wh1te909
6bbc0987ad show correct timezone for checks/tasks last run column #166 2020-11-10 03:12:58 +00:00
wh1te909
8c480b43e2 allow sorting of checks status and more #169 2020-11-10 02:38:35 +00:00
Tragic Bronson
079f6731dd Merge pull request #173 from sadnub/develop
more agent table filter options
2020-11-09 16:03:00 -08:00
sadnub
f99d5754cd add buttons to filter popup to apply and clear filter 2020-11-09 16:54:22 -05:00
sadnub
bf8c41e362 bump app ver 2020-11-09 12:21:30 -05:00
sadnub
7f7bc06eb4 add advanced filter functions to agent table 2020-11-09 12:20:42 -05:00
wh1te909
b507e59359 fix bulk actions 2020-11-09 11:19:30 +00:00
wh1te909
72078ac6bf fix log modal 2020-11-09 10:29:06 +00:00
wh1te909
0db9e082e2 fix audit manager 2020-11-09 10:21:55 +00:00
wh1te909
0c44394a76 fix edit agent 2020-11-09 10:10:40 +00:00
wh1te909
e20aa0cf04 fix deployments 2020-11-09 06:34:11 +00:00
wh1te909
fa30a50a91 fix pending actions 2020-11-09 06:06:15 +00:00
wh1te909
f6629ff12c update reqs 2020-11-08 22:51:50 +00:00
wh1te909
4128e4db73 client tests 2020-11-08 11:13:15 +00:00
wh1te909
34cac5685f client/site modal fixes 2020-11-08 10:11:45 +00:00
Tragic Bronson
4c9b91d536 Merge pull request #171 from sadnub/rework-agent
Rework agent
2020-11-07 14:07:52 -08:00
sadnub
95b95a8998 fix relations view 2020-11-06 17:40:47 -05:00
wh1te909
617738bb28 Release 0.1.3 2020-11-06 21:06:04 +00:00
wh1te909
f6ac15d790 fix auto update for older agents 2020-11-06 21:04:44 +00:00
sadnub
79e1324ead small optimization 2020-11-06 16:03:29 -05:00
sadnub
4ef9f010f0 fix client tree to pull the correct agents in table 2020-11-06 15:55:51 -05:00
sadnub
e6e8865708 fix audit manager 2020-11-06 15:15:18 -05:00
sadnub
33cd8f9b0d fix a few property name issues 2020-11-06 15:06:20 -05:00
sadnub
a7138e019c bump app version 2020-11-06 13:58:42 -05:00
sadnub
049b72bd50 fix install agent modal 2020-11-06 13:53:39 -05:00
sadnub
f3f1987515 fix pending actions 2020-11-06 13:53:39 -05:00
sadnub
a9395d89cd fix bulk actions modal 2020-11-06 13:53:39 -05:00
sadnub
bc2fcee8ba finish fixing tests 2020-11-06 13:53:39 -05:00
sadnub
242ff2ceca fix agent tests 2020-11-06 13:53:39 -05:00
sadnub
70790ac762 fix client and automation tests 2020-11-06 13:52:55 -05:00
sadnub
0f98869b61 Rework clients app and rename client and site property to name 2020-11-06 13:51:18 -05:00
sadnub
9ddc02140f fix up automation app 2020-11-06 13:51:18 -05:00
sadnub
ee631b3d20 add reverse migration function 2020-11-06 13:51:18 -05:00
sadnub
32f56e60d8 most rework finished 2020-11-06 13:51:18 -05:00
sadnub
6102b51d9e create migrations to link to correct site 2020-11-06 13:51:18 -05:00
sadnub
2baee27859 agent rework start 2020-11-06 13:51:18 -05:00
Tragic Bronson
144a3dedbb Merge pull request #165 from sadnub/develop
fix strange test issue
2020-11-02 09:30:57 -08:00
sadnub
f90d966f1a fix strange test issue 2020-11-02 10:18:07 -05:00
wh1te909
b188e2ea97 more agent tasks tests 2020-11-02 10:20:22 +00:00
191 changed files with 16856 additions and 5931 deletions

4
.gitignore vendored
View File

@@ -34,6 +34,7 @@ app.ini
create_services.py
gen_random.py
sync_salt_modules.py
change_times.py
rmm-*.exe
rmm-*.ps1
api/tacticalrmm/accounts/management/commands/*.json
@@ -41,3 +42,6 @@ api/tacticalrmm/accounts/management/commands/random_data.py
versioninfo.go
resource.syso
htmlcov/
docker-compose.dev.yml
docs/.vuepress/dist
nats-rmm.conf

View File

@@ -1,43 +0,0 @@
dist: focal
matrix:
include:
- language: node_js
node_js: "12"
before_install:
- cd web
install:
- npm install
script:
- npm run test:unit
- language: python
python: "3.8"
services:
- redis
addons:
postgresql: "13"
apt:
packages:
- postgresql-13
before_script:
- psql -c 'CREATE DATABASE travisci;' -U postgres
- psql -c "CREATE USER travisci WITH PASSWORD 'travisSuperSekret6645';" -U postgres
- psql -c 'GRANT ALL PRIVILEGES ON DATABASE travisci TO travisci;' -U postgres
- psql -c 'ALTER USER travisci CREATEDB;' -U postgres
before_install:
- cd api/tacticalrmm
install:
- pip install --no-cache-dir --upgrade pip
- pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
- pip install --no-cache-dir -r requirements.txt -r requirements-test.txt
script:
- coverage run manage.py test -v 2
after_success:
- coveralls

View File

@@ -1,6 +1,5 @@
# Tactical RMM
[![Build Status](https://travis-ci.com/wh1te909/tacticalrmm.svg?branch=develop)](https://travis-ci.com/wh1te909/tacticalrmm)
[![Build Status](https://dev.azure.com/dcparsi/Tactical%20RMM/_apis/build/status/wh1te909.tacticalrmm?branchName=develop)](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
[![Coverage Status](https://coveralls.io/repos/github/wh1te909/tacticalrmm/badge.png?branch=develop&kill_cache=1)](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
@@ -64,6 +63,7 @@ sudo ufw allow ssh
sudo ufw allow http
sudo ufw allow https
sudo ufw allow proto tcp from any to any port 4505,4506
sudo ufw allow proto tcp from any to any port 4222
sudo ufw enable && sudo ufw reload
```
@@ -78,7 +78,7 @@ Create A record ```mesh.tacticalrmm.com``` for meshcentral
Download the install script and run it
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/install.sh
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh
chmod +x install.sh
./install.sh
```
@@ -92,17 +92,17 @@ chmod +x install.sh
From the app's dashboard, choose Agents > Install Agent to generate an installer.
## Updating
Download and run [update.sh](./update.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh))
Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh)
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
chmod +x update.sh
./update.sh
```
## Backup
Download [backup.sh](./backup.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh))
Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh)
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
```
Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
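For reference, the DATABASES section in local_settings.py follows the standard Django layout; a minimal sketch is shown below with placeholder values (the database name, user, and password here are illustrative, not taken from this repository).
```
# Illustrative excerpt of /rmm/api/tacticalrmm/tacticalrmm/local_settings.py
# (values are placeholders; copy the real USER and PASSWORD into backup.sh)
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": "tacticalrmm",
        "USER": "someuser",
        "PASSWORD": "somepassword",
        "HOST": "localhost",
        "PORT": "5432",
    }
}
```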
@@ -121,7 +121,7 @@ Copy backup file to new server
Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
```
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/restore.sh
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
```
Run the restore script, passing it the backup tar file as the first argument
@@ -129,14 +129,3 @@ Run the restore script, passing it the backup tar file as the first argument
chmod +x restore.sh
./restore.sh rmm-backup-xxxxxxx.tar
```
## Using another ssl certificate
During the install you can opt out of using the Let's Encrypt certificate. If you do this, the script will create a self-signed certificate so that https continues to work. You can replace the certificates in /certs/example.com/(privkey.pem | pubkey.pem) with your own.
If you are migrating from Let's Encrypt to another certificate provider, you can create the /certs directory and copy your certificates there. Doing so is recommended because this directory is backed up by the provided backup script. Then modify the nginx configurations to use your new certificates.
The cert that is generated is a wildcard certificate and is used in the nginx configurations: rmm.conf, api.conf, and mesh.conf. If you can't generate wildcard certificates, you can create a cert for each subdomain and configure each nginx configuration file to use its own certificate. Then restart nginx:
```
sudo systemctl restart nginx
```
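If you go the per-subdomain route, the change in each nginx config amounts to pointing the two ssl directives at that subdomain's certificate. A hedged sketch (paths and domain are illustrative, not taken from this repo) for rmm.conf:
```
# excerpt of an nginx server block, e.g. rmm.conf (illustrative paths)
server {
    listen 443 ssl;
    server_name rmm.example.com;

    ssl_certificate     /certs/example.com/pubkey.pem;
    ssl_certificate_key /certs/example.com/privkey.pem;

    # ... rest of the existing config unchanged
}
```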

View File

@@ -0,0 +1,26 @@
# Generated by Django 3.1.2 on 2020-11-10 20:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("agents", "0024_auto_20201101_2319"),
("accounts", "0005_auto_20201002_1303"),
]
operations = [
migrations.AddField(
model_name="user",
name="agent",
field=models.OneToOneField(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="user",
to="agents.agent",
),
),
]

View File

@@ -0,0 +1,25 @@
# Generated by Django 3.1.2 on 2020-11-01 22:54
from django.db import migrations
def link_agents_to_users(apps, schema_editor):
Agent = apps.get_model("agents", "Agent")
User = apps.get_model("accounts", "User")
for agent in Agent.objects.all():
user = User.objects.filter(username=agent.agent_id).first()
if user:
user.agent = agent
user.save()
class Migration(migrations.Migration):
dependencies = [
("accounts", "0006_user_agent"),
]
operations = [
migrations.RunPython(link_agents_to_users, migrations.RunPython.noop),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-12 00:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0007_update_agent_primary_key'),
]
operations = [
migrations.AddField(
model_name='user',
name='dark_mode',
field=models.BooleanField(default=True),
),
]

View File

@@ -7,6 +7,15 @@ from logs.models import BaseAuditModel
class User(AbstractUser, BaseAuditModel):
is_active = models.BooleanField(default=True)
totp_key = models.CharField(max_length=50, null=True, blank=True)
dark_mode = models.BooleanField(default=True)
agent = models.OneToOneField(
"agents.Agent",
related_name="user",
null=True,
blank=True,
on_delete=models.CASCADE,
)
@staticmethod
def serialize(user):

View File

@@ -195,6 +195,14 @@ class TestUserAction(TacticalTestCase):
self.check_not_authenticated("put", url)
def test_darkmode(self):
url = "/accounts/users/ui/"
data = {"dark_mode": False}
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("patch", url)
class TestTOTPSetup(TacticalTestCase):
def setUp(self):

View File

@@ -7,4 +7,5 @@ urlpatterns = [
path("users/reset/", views.UserActions.as_view()),
path("users/reset_totp/", views.UserActions.as_view()),
path("users/setup_totp/", views.TOTPSetup.as_view()),
path("users/ui/", views.UserUI.as_view()),
]

View File

@@ -74,8 +74,7 @@ class LoginView(KnoxLoginView):
class GetAddUsers(APIView):
def get(self, request):
agents = Agent.objects.values_list("agent_id", flat=True)
users = User.objects.exclude(username__in=agents)
users = User.objects.filter(agent=None)
return Response(UserSerializer(users, many=True).data)
@@ -157,3 +156,11 @@ class TOTPSetup(APIView):
return Response(TOTPSetupSerializer(user).data)
return Response("totp token already set")
class UserUI(APIView):
def patch(self, request):
user = request.user
user.dark_mode = request.data["dark_mode"]
user.save(update_fields=["dark_mode"])
return Response("ok")

View File

@@ -1,14 +1,35 @@
from .models import Agent
import random
import string
import os
import json
from model_bakery.recipe import Recipe, seq
from itertools import cycle
from django.utils import timezone as djangotime
from django.conf import settings
from .models import Agent
def generate_agent_id(hostname):
rand = "".join(random.choice(string.ascii_letters) for _ in range(35))
return f"{rand}-{hostname}"
def get_wmi_data():
with open(
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
) as f:
return json.load(f)
agent = Recipe(
Agent,
client="Default",
site="Default",
hostname=seq("TestHostname"),
hostname="DESKTOP-TEST123",
version="1.1.0",
monitoring_type=cycle(["workstation", "server"]),
salt_id=generate_agent_id("DESKTOP-TEST123"),
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
)
server_agent = agent.extend(
@@ -49,3 +70,5 @@ agent_with_services = agent.extend(
},
],
)
agent_with_wmi = agent.extend(wmi=get_wmi_data())

View File

@@ -0,0 +1,20 @@
# Generated by Django 3.1.2 on 2020-11-01 22:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('clients', '0006_deployment'),
('agents', '0020_auto_20201025_2129'),
]
operations = [
migrations.AddField(
model_name='agent',
name='site_link',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'),
),
]

View File

@@ -0,0 +1,31 @@
# Generated by Django 3.1.2 on 2020-11-01 22:54
from django.db import migrations
def link_sites_to_agents(apps, schema_editor):
Agent = apps.get_model("agents", "Agent")
Site = apps.get_model("clients", "Site")
for agent in Agent.objects.all():
site = Site.objects.get(client__client=agent.client, site=agent.site)
agent.site_link = site
agent.save()
def reverse(apps, schema_editor):
Agent = apps.get_model("agents", "Agent")
for agent in Agent.objects.all():
agent.site = agent.site_link.site
agent.client = agent.site_link.client.client
agent.save()
class Migration(migrations.Migration):
dependencies = [
("agents", "0021_agent_site_link"),
]
operations = [
migrations.RunPython(link_sites_to_agents, reverse),
]

View File

@@ -0,0 +1,21 @@
# Generated by Django 3.1.2 on 2020-11-01 23:12
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0022_update_site_primary_key'),
]
operations = [
migrations.RemoveField(
model_name='agent',
name='client',
),
migrations.RemoveField(
model_name='agent',
name='site',
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.2 on 2020-11-01 23:19
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agents', '0023_auto_20201101_2312'),
]
operations = [
migrations.RenameField(
model_name='agent',
old_name='site_link',
new_name='site',
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-22 04:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0024_auto_20201101_2319'),
]
operations = [
migrations.AlterField(
model_name='recoveryaction',
name='mode',
field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC')], default='mesh', max_length=50),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-25 23:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0025_auto_20201122_0407'),
]
operations = [
migrations.AlterField(
model_name='recoveryaction',
name='mode',
field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC'), ('checkrunner', 'Checkrunner')], default='mesh', max_length=50),
),
]

View File

@@ -7,6 +7,7 @@ from Crypto.Random import get_random_bytes
from Crypto.Hash import SHA3_384
from Crypto.Util.Padding import pad
import validators
import msgpack
import random
import re
import string
@@ -14,6 +15,8 @@ from collections import Counter
from loguru import logger
from packaging import version as pyver
from distutils.version import LooseVersion
from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout
from django.db import models
from django.conf import settings
@@ -44,9 +47,7 @@ class Agent(BaseAuditModel):
boot_time = models.FloatField(null=True, blank=True)
logged_in_username = models.CharField(null=True, blank=True, max_length=255)
last_logged_in_user = models.CharField(null=True, blank=True, max_length=255)
client = models.CharField(max_length=200)
antivirus = models.CharField(default="n/a", max_length=255) # deprecated
site = models.CharField(max_length=150)
monitoring_type = models.CharField(max_length=30)
description = models.CharField(null=True, blank=True, max_length=255)
mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
@@ -62,6 +63,13 @@ class Agent(BaseAuditModel):
max_length=255, choices=TZ_CHOICES, null=True, blank=True
)
maintenance_mode = models.BooleanField(default=False)
site = models.ForeignKey(
"clients.Site",
related_name="agents",
null=True,
blank=True,
on_delete=models.SET_NULL,
)
policy = models.ForeignKey(
"automation.Policy",
related_name="agents",
@@ -73,6 +81,14 @@ class Agent(BaseAuditModel):
def __str__(self):
return self.hostname
@property
def client(self):
return self.site.client
@property
def has_nats(self):
return pyver.parse(self.version) >= pyver.parse("1.1.0")
@property
def timezone(self):
# return the default timezone unless the timezone is explicitly set per agent
@@ -86,9 +102,9 @@ class Agent(BaseAuditModel):
@property
def arch(self):
if self.operating_system is not None:
if "64 bit" in self.operating_system:
if "64 bit" in self.operating_system or "64bit" in self.operating_system:
return "64"
elif "32 bit" in self.operating_system:
elif "32 bit" in self.operating_system or "32bit" in self.operating_system:
return "32"
return None
@@ -133,11 +149,7 @@ class Agent(BaseAuditModel):
@property
def has_patches_pending(self):
if self.winupdates.filter(action="approve").filter(installed=False).exists():
return True
else:
return False
return self.winupdates.filter(action="approve").filter(installed=False).exists()
@property
def checks(self):
@@ -281,11 +293,9 @@ class Agent(BaseAuditModel):
# returns agent policy merged with a client or site specific policy
def get_patch_policy(self):
from clients.models import Client, Site
# check if site has a patch policy and if so use it
client = Client.objects.get(client=self.client)
site = Site.objects.get(client=client, site=self.site)
site = self.site
core_settings = CoreSettings.objects.first()
patch_policy = None
agent_policy = self.winupdatepolicy.get()
@@ -426,6 +436,37 @@ class Agent(BaseAuditModel):
except Exception:
return "err"
async def nats_cmd(self, data, timeout=30, wait=True):
nc = NATS()
options = {
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
"user": "tacticalrmm",
"password": settings.SECRET_KEY,
"connect_timeout": 3,
"max_reconnect_attempts": 2,
}
try:
await nc.connect(**options)
except:
return "natsdown"
if wait:
try:
msg = await nc.request(
self.agent_id, msgpack.dumps(data), timeout=timeout
)
except ErrTimeout:
ret = "timeout"
else:
ret = msgpack.loads(msg.data)
await nc.close()
return ret
else:
await nc.publish(self.agent_id, msgpack.dumps(data))
await nc.flush()
await nc.close()
def salt_api_cmd(self, **kwargs):
# salt should always timeout first before the requests' timeout
@@ -585,10 +626,7 @@ class Agent(BaseAuditModel):
return "failed"
def not_supported(self, version_added):
if pyver.parse(self.version) < pyver.parse(version_added):
return True
return False
return pyver.parse(self.version) < pyver.parse(version_added)
def delete_superseded_updates(self):
try:
@@ -667,10 +705,10 @@ class AgentOutage(models.Model):
CORE = CoreSettings.objects.first()
CORE.send_mail(
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data overdue",
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue",
(
f"Data has not been received from client {self.agent.client}, "
f"site {self.agent.site}, "
f"Data has not been received from client {self.agent.client.name}, "
f"site {self.agent.site.name}, "
f"agent {self.agent.hostname} "
"within the expected time."
),
@@ -681,10 +719,10 @@ class AgentOutage(models.Model):
CORE = CoreSettings.objects.first()
CORE.send_mail(
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data received",
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received",
(
f"Data has been received from client {self.agent.client}, "
f"site {self.agent.site}, "
f"Data has been received from client {self.agent.client.name}, "
f"site {self.agent.site.name}, "
f"agent {self.agent.hostname} "
"after an interruption in data transmission."
),
@@ -695,7 +733,7 @@ class AgentOutage(models.Model):
CORE = CoreSettings.objects.first()
CORE.send_sms(
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data overdue"
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue"
)
def send_recovery_sms(self):
@@ -703,7 +741,7 @@ class AgentOutage(models.Model):
CORE = CoreSettings.objects.first()
CORE.send_sms(
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data received"
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received"
)
def __str__(self):
@@ -714,6 +752,8 @@ RECOVERY_CHOICES = [
("salt", "Salt"),
("mesh", "Mesh"),
("command", "Command"),
("rpc", "Nats RPC"),
("checkrunner", "Checkrunner"),
]
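The nats_cmd coroutine added above is invoked from synchronous Django code elsewhere in this changeset via asyncio.run. A minimal usage sketch, based on the call sites visible in the tasks and tests diffs (the pk lookup is illustrative only):
```
import asyncio

from agents.models import Agent

agent = Agent.objects.get(pk=1)  # illustrative lookup

# fire-and-forget: publish to the agent and return immediately (as in get_wmi_detail_task)
asyncio.run(agent.nats_cmd({"func": "sysinfo"}, wait=False))

# request/reply: wait up to 10 seconds for the agent's response (as in the power action tests)
ret = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
if ret in ("timeout", "natsdown"):
    pass  # agent unreachable or the NATS server is down
```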

View File

@@ -1,10 +1,12 @@
import pytz
from rest_framework import serializers
from rest_framework.fields import ReadOnlyField
from .models import Agent, Note
from winupdate.serializers import WinUpdatePolicySerializer
from clients.serializers import ClientSerializer
class AgentSerializer(serializers.ModelSerializer):
@@ -19,6 +21,8 @@ class AgentSerializer(serializers.ModelSerializer):
checks = serializers.ReadOnlyField()
timezone = serializers.ReadOnlyField()
all_timezones = serializers.SerializerMethodField()
client_name = serializers.ReadOnlyField(source="client.name")
site_name = serializers.ReadOnlyField(source="site.name")
def get_all_timezones(self, obj):
return pytz.all_timezones
@@ -35,6 +39,8 @@ class AgentTableSerializer(serializers.ModelSerializer):
status = serializers.ReadOnlyField()
checks = serializers.ReadOnlyField()
last_seen = serializers.SerializerMethodField()
client_name = serializers.ReadOnlyField(source="client.name")
site_name = serializers.ReadOnlyField(source="site.name")
def get_last_seen(self, obj):
if obj.time_zone is not None:
@@ -50,8 +56,8 @@ class AgentTableSerializer(serializers.ModelSerializer):
"id",
"hostname",
"agent_id",
"client",
"site",
"site_name",
"client_name",
"monitoring_type",
"description",
"needs_reboot",
@@ -66,11 +72,13 @@ class AgentTableSerializer(serializers.ModelSerializer):
"last_logged_in_user",
"maintenance_mode",
]
depth = 2
class AgentEditSerializer(serializers.ModelSerializer):
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
all_timezones = serializers.SerializerMethodField()
client = ClientSerializer(read_only=True)
def get_all_timezones(self, obj):
return pytz.all_timezones
@@ -107,6 +115,9 @@ class WinAgentSerializer(serializers.ModelSerializer):
class AgentHostnameSerializer(serializers.ModelSerializer):
client = serializers.ReadOnlyField(source="client.name")
site = serializers.ReadOnlyField(source="site.name")
class Meta:
model = Agent
fields = (

View File

@@ -1,3 +1,4 @@
import asyncio
from loguru import logger
from time import sleep
import random
@@ -11,6 +12,7 @@ from django.conf import settings
from tacticalrmm.celery import app
from agents.models import Agent, AgentOutage
from core.models import CoreSettings
from logs.models import PendingAction
logger.configure(**settings.LOG_CONFIG)
@@ -33,6 +35,9 @@ def send_agent_update_task(pks, version):
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.salt_id}. Skipping."
)
continue
# golang agent only backwards compatible with py agent 0.11.2
@@ -47,20 +52,43 @@ def send_agent_update_task(pks, version):
else:
url = agent.winagent_dl
inno = agent.win_inno_exe
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
logger.info(
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
)
if agent.has_nats:
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
continue
PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
details={
"url": agent.winagent_dl,
"version": settings.LATEST_AGENT_VER,
"inno": agent.win_inno_exe,
},
)
# TODO
# Salt is deprecated, remove this once salt is gone
else:
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
sleep(10)
@app.task
def auto_self_agent_update_task(test=False):
def auto_self_agent_update_task():
core = CoreSettings.objects.first()
if not core.agent_auto_update:
logger.info("Agent auto update is disabled. Skipping.")
return
q = Agent.objects.only("pk", "version")
@@ -69,6 +97,7 @@ def auto_self_agent_update_task(test=False):
for i in q
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
]
logger.info(f"Updating {len(agents)}")
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
@@ -78,6 +107,9 @@ def auto_self_agent_update_task(test=False):
# skip if we can't determine the arch
if agent.arch is None:
logger.warning(
f"Unable to determine arch on {agent.salt_id}. Skipping."
)
continue
# golang agent only backwards compatible with py agent 0.11.2
@@ -92,15 +124,36 @@ def auto_self_agent_update_task(test=False):
else:
url = agent.winagent_dl
inno = agent.win_inno_exe
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
logger.info(
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
)
if not test:
sleep(10)
if agent.has_nats:
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
continue
PendingAction.objects.create(
agent=agent,
action_type="agentupdate",
details={
"url": agent.winagent_dl,
"version": settings.LATEST_AGENT_VER,
"inno": agent.win_inno_exe,
},
)
# TODO
# Salt is deprecated, remove this once salt is gone
else:
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
sleep(10)
@app.task
@@ -125,7 +178,11 @@ def update_salt_minion_task():
@app.task
def get_wmi_detail_task(pk):
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")
if agent.has_nats:
asyncio.run(agent.nats_cmd({"func": "sysinfo"}, wait=False))
else:
agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")
return "ok"
@@ -145,7 +202,7 @@ def sync_salt_modules_task(pk):
def batch_sync_modules_task():
# sync modules, split into chunks of 50 agents to not overload salt
agents = Agent.objects.all()
online = [i.salt_id for i in agents if i.status == "online"]
online = [i.salt_id for i in agents]
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
for chunk in chunks:
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
@@ -156,54 +213,59 @@ def batch_sync_modules_task():
def batch_sysinfo_task():
# update system info using WMI
agents = Agent.objects.all()
online = [
i.salt_id
for i in agents
if not i.not_supported("0.11.0") and i.status == "online"
agents_nats = [agent for agent in agents if agent.has_nats]
minions = [
agent.salt_id
for agent in agents
if not agent.has_nats and pyver.parse(agent.version) >= pyver.parse("0.11.0")
]
chunks = (online[i : i + 30] for i in range(0, len(online), 30))
for chunk in chunks:
Agent.salt_batch_async(minions=chunk, func="win_agent.local_sys_info")
sleep(10)
if minions:
Agent.salt_batch_async(minions=minions, func="win_agent.local_sys_info")
for agent in agents_nats:
asyncio.run(agent.nats_cmd({"func": "sysinfo"}, wait=False))
@app.task
def uninstall_agent_task(salt_id):
def uninstall_agent_task(salt_id, has_nats):
attempts = 0
error = False
while 1:
try:
if not has_nats:
while 1:
try:
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "local",
"tgt": salt_id,
"fun": "win_agent.uninstall_agent",
"timeout": 8,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=10,
)
ret = r.json()["return"][0][salt_id]
except Exception:
attempts += 1
else:
if ret != "ok":
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "local",
"tgt": salt_id,
"fun": "win_agent.uninstall_agent",
"timeout": 8,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "pam",
}
],
timeout=10,
)
ret = r.json()["return"][0][salt_id]
except Exception:
attempts += 1
else:
attempts = 0
if ret != "ok":
attempts += 1
else:
attempts = 0
if attempts >= 10:
error = True
break
elif attempts == 0:
break
if attempts >= 10:
error = True
break
elif attempts == 0:
break
if error:
logger.error(f"{salt_id} uninstall failed")

View File

@@ -6,20 +6,42 @@ from model_bakery import baker
from itertools import cycle
from django.conf import settings
from django.utils import timezone as djangotime
from tacticalrmm.test import BaseTestCase, TacticalTestCase
from tacticalrmm.test import TacticalTestCase
from .serializers import AgentSerializer
from winupdate.serializers import WinUpdatePolicySerializer
from .models import Agent
from .tasks import auto_self_agent_update_task, OLD_64_PY_AGENT, OLD_32_PY_AGENT
from .tasks import (
auto_self_agent_update_task,
update_salt_minion_task,
get_wmi_detail_task,
sync_salt_modules_task,
batch_sync_modules_task,
batch_sysinfo_task,
OLD_64_PY_AGENT,
OLD_32_PY_AGENT,
)
from winupdate.models import WinUpdatePolicy
class TestAgentViews(BaseTestCase):
class TestAgentViews(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
client = baker.make("clients.Client", name="Google")
site = baker.make("clients.Site", client=client, name="LA Office")
self.agent = baker.make_recipe(
"agents.online_agent", site=site, version="1.1.0"
)
baker.make_recipe("winupdate.winupdate_policy", agent=self.agent)
def test_get_patch_policy(self):
# make sure get_patch_policy doesn't error out when agent has policy with
# an empty patch policy
self.agent.policy = self.policy
policy = baker.make("automation.Policy")
self.agent.policy = policy
self.agent.save(update_fields=["policy"])
_ = self.agent.get_patch_policy()
@@ -30,8 +52,8 @@ class TestAgentViews(BaseTestCase):
self.agent.policy = None
self.agent.save(update_fields=["policy"])
self.coresettings.server_policy = self.policy
self.coresettings.workstation_policy = self.policy
self.coresettings.server_policy = policy
self.coresettings.workstation_policy = policy
self.coresettings.save(update_fields=["server_policy", "workstation_policy"])
_ = self.agent.get_patch_policy()
@@ -59,29 +81,29 @@ class TestAgentViews(BaseTestCase):
self.check_not_authenticated("post", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_ping(self, mock_ret):
@patch("agents.models.Agent.nats_cmd")
def test_ping(self, nats_cmd):
url = f"/agents/{self.agent.pk}/ping/"
mock_ret.return_value = "timeout"
nats_cmd.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
ret = {"name": self.agent.hostname, "status": "offline"}
self.assertEqual(r.json(), ret)
mock_ret.return_value = "error"
nats_cmd.return_value = "natsdown"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
ret = {"name": self.agent.hostname, "status": "offline"}
self.assertEqual(r.json(), ret)
mock_ret.return_value = True
nats_cmd.return_value = "pong"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
ret = {"name": self.agent.hostname, "status": "online"}
self.assertEqual(r.json(), ret)
mock_ret.return_value = False
nats_cmd.return_value = "asdasjdaksdasd"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
ret = {"name": self.agent.hostname, "status": "offline"}
@@ -89,33 +111,23 @@ class TestAgentViews(BaseTestCase):
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.nats_cmd")
@patch("agents.tasks.uninstall_agent_task.delay")
def test_uninstall(self, mock_task):
@patch("agents.views.reload_nats")
def test_uninstall(self, reload_nats, mock_task, nats_cmd):
url = "/agents/uninstall/"
data = {"pk": self.agent.pk}
r = self.client.delete(url, data, format="json")
self.assertEqual(r.status_code, 200)
mock_task.assert_called_with(self.agent.salt_id)
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
reload_nats.assert_called_once()
mock_task.assert_called_with(self.agent.salt_id, True)
self.check_not_authenticated("delete", url)
@patch("agents.tasks.uninstall_agent_task.delay")
def test_uninstall_catch_no_user(self, mock_task):
url = "/agents/uninstall/"
data = {"pk": self.agent.pk}
self.agent_user.delete()
r = self.client.delete(url, data, format="json")
self.assertEqual(r.status_code, 200)
mock_task.assert_called_with(self.agent.salt_id)
self.check_not_authenticated("delete", url)
@patch("agents.models.Agent.salt_api_cmd")
@patch("agents.models.Agent.nats_cmd")
def test_get_processes(self, mock_ret):
url = f"/agents/{self.agent.pk}/getprocs/"
@@ -135,82 +147,61 @@ class TestAgentViews(BaseTestCase):
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = "error"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_kill_proc(self, mock_ret):
@patch("agents.models.Agent.nats_cmd")
def test_kill_proc(self, nats_cmd):
url = f"/agents/{self.agent.pk}/8234/killproc/"
mock_ret.return_value = True
nats_cmd.return_value = "ok"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
mock_ret.return_value = False
nats_cmd.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = "error"
nats_cmd.return_value = "process doesn't exist"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
@patch("agents.models.Agent.nats_cmd")
def test_get_event_log(self, mock_ret):
url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
with open(
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/eventlograw.json")
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
) as f:
mock_ret.return_value = json.load(f)
with open(
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
) as f:
decoded = json.load(f)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(decoded, r.json())
mock_ret.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = "error"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_power_action(self, mock_ret):
@patch("agents.models.Agent.nats_cmd")
def test_power_action(self, nats_cmd):
url = f"/agents/poweraction/"
data = {"pk": self.agent.pk, "action": "rebootnow"}
mock_ret.return_value = True
nats_cmd.return_value = "ok"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with({"func": "rebootnow"}, timeout=10)
mock_ret.return_value = "error"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
mock_ret.return_value = False
nats_cmd.return_value = "timeout"
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("post", url)
@patch("agents.models.Agent.salt_api_cmd")
@patch("agents.models.Agent.nats_cmd")
def test_send_raw_cmd(self, mock_ret):
url = f"/agents/sendrawcmd/"
@@ -229,10 +220,6 @@ class TestAgentViews(BaseTestCase):
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
mock_ret.return_value = False
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 400)
self.check_not_authenticated("post", url)
@patch("agents.models.Agent.salt_api_cmd")
@@ -278,9 +265,10 @@ class TestAgentViews(BaseTestCase):
def test_install_agent(self, mock_subprocess, mock_file_exists):
url = f"/agents/installagent/"
site = baker.make("clients.Site")
data = {
"client": "Google",
"site": "LA Office",
"client": site.client.id,
"site": site.id,
"arch": "64",
"expires": 23,
"installMethod": "exe",
@@ -382,12 +370,14 @@ class TestAgentViews(BaseTestCase):
self.check_not_authenticated("get", url)
def test_edit_agent(self):
# setup data
site = baker.make("clients.Site", name="Ny Office")
url = "/agents/editagent/"
edit = {
"id": self.agent.pk,
"client": "Facebook",
"site": "NY Office",
"site": site.id,
"monitoring_type": "workstation",
"description": "asjdk234andasd",
"overdue_time": 300,
@@ -417,7 +407,7 @@ class TestAgentViews(BaseTestCase):
agent = Agent.objects.get(pk=self.agent.pk)
data = AgentSerializer(agent).data
self.assertEqual(data["site"], "NY Office")
self.assertEqual(data["site"], site.id)
policy = WinUpdatePolicy.objects.get(agent=self.agent)
data = WinUpdatePolicySerializer(policy).data
@@ -441,6 +431,8 @@ class TestAgentViews(BaseTestCase):
self.assertIn("mstsc.html?login=", r.data["webrdp"])
self.assertEqual(self.agent.hostname, r.data["hostname"])
self.assertEqual(self.agent.client.name, r.data["client"])
self.assertEqual(self.agent.site.name, r.data["site"])
self.assertEqual(r.status_code, 200)
@@ -451,28 +443,28 @@ class TestAgentViews(BaseTestCase):
self.check_not_authenticated("get", url)
def test_by_client(self):
url = "/agents/byclient/Google/"
url = f"/agents/byclient/{self.agent.client.id}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertTrue(r.data)
url = f"/agents/byclient/Majh3 Akj34 ad/"
url = f"/agents/byclient/500/"
r = self.client.get(url)
self.assertFalse(r.data) # returns empty list
self.check_not_authenticated("get", url)
def test_by_site(self):
url = f"/agents/bysite/Google/Main Office/"
url = f"/agents/bysite/{self.agent.site.id}/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertTrue(r.data)
url = f"/agents/bysite/Google/Ajdaksd Office/"
url = f"/agents/bysite/500/"
r = self.client.get(url)
self.assertFalse(r.data)
self.assertEqual(r.data, [])
self.check_not_authenticated("get", url)
@@ -536,12 +528,14 @@ class TestAgentViews(BaseTestCase):
self.check_not_authenticated("get", url)
@patch("winupdate.tasks.bulk_check_for_updates_task.delay")
@patch("scripts.tasks.handle_bulk_script_task.delay")
@patch("scripts.tasks.handle_bulk_command_task.delay")
@patch("agents.models.Agent.salt_batch_async")
def test_bulk_cmd_script(self, mock_ret, mock_update):
def test_bulk_cmd_script(
self, salt_batch_async, bulk_command, bulk_script, mock_update
):
url = "/agents/bulk/"
mock_ret.return_value = "ok"
payload = {
"mode": "command",
"target": "agents",
@@ -556,6 +550,7 @@ class TestAgentViews(BaseTestCase):
}
r = self.client.post(url, payload, format="json")
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
self.assertEqual(r.status_code, 200)
payload = {
@@ -575,7 +570,7 @@ class TestAgentViews(BaseTestCase):
payload = {
"mode": "command",
"target": "client",
"client": "Google",
"client": self.agent.client.id,
"site": None,
"agentPKs": [
self.agent.pk,
@@ -587,12 +582,13 @@ class TestAgentViews(BaseTestCase):
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
payload = {
"mode": "command",
"target": "client",
"client": "Google",
"site": "Main Office",
"client": self.agent.client.id,
"site": self.agent.site.id,
"agentPKs": [
self.agent.pk,
],
@@ -603,28 +599,7 @@ class TestAgentViews(BaseTestCase):
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
payload = {
"mode": "command",
"target": "site",
"client": "A ASJDHkjASHDASD",
"site": "asdasdasdasda",
"agentPKs": [
self.agent.pk,
],
"cmd": "gpupdate /force",
"timeout": 300,
"shell": "cmd",
}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 404)
mock_ret.return_value = "timeout"
payload["client"] = "Google"
payload["site"] = "Main Office"
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
payload = {
"mode": "scan",
@@ -635,20 +610,20 @@ class TestAgentViews(BaseTestCase):
self.agent.pk,
],
}
mock_ret.return_value = "ok"
r = self.client.post(url, payload, format="json")
mock_update.assert_called_once()
mock_update.assert_called_with(minions=[self.agent.salt_id])
self.assertEqual(r.status_code, 200)
payload = {
"mode": "install",
"target": "client",
"client": "Google",
"client": self.agent.client.id,
"site": None,
"agentPKs": [
self.agent.pk,
],
}
salt_batch_async.return_value = "ok"
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
@@ -664,41 +639,18 @@ class TestAgentViews(BaseTestCase):
self.check_not_authenticated("post", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_restart_mesh(self, mock_ret):
url = f"/agents/{self.agent.pk}/restartmesh/"
mock_ret.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = "error"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = False
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = True
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_recover_mesh(self, mock_ret):
@patch("agents.models.Agent.nats_cmd")
def test_recover_mesh(self, nats_cmd):
url = f"/agents/{self.agent.pk}/recovermesh/"
mock_ret.return_value = True
nats_cmd.return_value = "ok"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertIn(self.agent.hostname, r.data)
nats_cmd.assert_called_with(
{"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
)
mock_ret.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
mock_ret.return_value = "error"
nats_cmd.return_value = "timeout"
r = self.client.get(url)
self.assertEqual(r.status_code, 400)
@@ -712,7 +664,6 @@ class TestAgentViews(BaseTestCase):
class TestAgentViewsNew(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
def test_agent_counts(self):
url = "/agents/agent_counts/"
@@ -748,13 +699,13 @@ class TestAgentViewsNew(TacticalTestCase):
def test_agent_maintenance_mode(self):
url = "/agents/maintenance/"
# create data
client = baker.make("clients.Client", client="Default")
site = baker.make("clients.Site", client=client, site="Site")
agent = baker.make_recipe("agents.agent", client=client.client, site=site.site)
# setup data
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.agent", site=site)
# Test client toggle maintenance mode
data = {"type": "Client", "id": client.id, "action": True}
data = {"type": "Client", "id": site.client.id, "action": True}
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)
@@ -782,8 +733,140 @@ class TestAgentViewsNew(TacticalTestCase):
self.check_not_authenticated("post", url)
class TestAgentTasks(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
@patch("agents.models.Agent.nats_cmd")
@patch("agents.models.Agent.salt_api_async", return_value=None)
def test_get_wmi_detail_task(self, salt_api_async, nats_cmd):
self.agent_salt = baker.make_recipe("agents.agent", version="1.0.2")
ret = get_wmi_detail_task.s(self.agent_salt.pk).apply()
salt_api_async.assert_called_with(timeout=30, func="win_agent.local_sys_info")
self.assertEqual(ret.status, "SUCCESS")
self.agent_nats = baker.make_recipe("agents.agent", version="1.1.0")
ret = get_wmi_detail_task.s(self.agent_nats.pk).apply()
nats_cmd.assert_called_with({"func": "sysinfo"}, wait=False)
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.salt_api_cmd")
def test_sync_salt_modules_task(self, salt_api_cmd):
self.agent = baker.make_recipe("agents.agent")
salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]}
ret = sync_salt_modules_task.s(self.agent.pk).apply()
salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules")
self.assertEqual(
ret.result, f"Successfully synced salt modules on {self.agent.hostname}"
)
self.assertEqual(ret.status, "SUCCESS")
salt_api_cmd.return_value = "timeout"
ret = sync_salt_modules_task.s(self.agent.pk).apply()
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
salt_api_cmd.return_value = "error"
ret = sync_salt_modules_task.s(self.agent.pk).apply()
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
@patch("agents.models.Agent.salt_batch_async", return_value=None)
@patch("agents.tasks.sleep", return_value=None)
def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
# chunks of 50, should run 4 times
baker.make_recipe(
"agents.online_agent", last_seen=djangotime.now(), _quantity=60
)
baker.make_recipe(
"agents.overdue_agent",
last_seen=djangotime.now() - djangotime.timedelta(minutes=9),
_quantity=115,
)
ret = batch_sync_modules_task.s().apply()
self.assertEqual(salt_batch_async.call_count, 4)
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.nats_cmd")
@patch("agents.models.Agent.salt_batch_async", return_value=None)
@patch("agents.tasks.sleep", return_value=None)
def test_batch_sysinfo_task(self, mock_sleep, salt_batch_async, nats_cmd):
self.agents_nats = baker.make_recipe(
"agents.agent", version="1.1.0", _quantity=20
)
# test nats
ret = batch_sysinfo_task.s().apply()
self.assertEqual(nats_cmd.call_count, 20)
nats_cmd.assert_called_with({"func": "sysinfo"}, wait=False)
self.assertEqual(ret.status, "SUCCESS")
self.agents_salt = baker.make_recipe(
"agents.agent", version="1.0.2", _quantity=70
)
minions = [i.salt_id for i in self.agents_salt]
ret = batch_sysinfo_task.s().apply()
self.assertEqual(salt_batch_async.call_count, 1)
salt_batch_async.assert_called_with(
minions=minions, func="win_agent.local_sys_info"
)
self.assertEqual(ret.status, "SUCCESS")
salt_batch_async.reset_mock()
[i.delete() for i in self.agents_salt]
# test old agents, should not run
self.agents_old = baker.make_recipe(
"agents.agent", version="0.10.2", _quantity=70
)
ret = batch_sysinfo_task.s().apply()
salt_batch_async.assert_not_called()
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.salt_api_async", return_value=None)
@patch("agents.tasks.sleep", return_value=None)
def test_update_salt_minion_task(self, mock_sleep, salt_api_async):
# test agents that need salt update
self.agents = baker.make_recipe(
"agents.agent",
version=settings.LATEST_AGENT_VER,
salt_ver="1.0.3",
_quantity=53,
)
ret = update_salt_minion_task.s().apply()
self.assertEqual(salt_api_async.call_count, 53)
self.assertEqual(ret.status, "SUCCESS")
[i.delete() for i in self.agents]
salt_api_async.reset_mock()
# test agents that need salt update but agent version too low
self.agents = baker.make_recipe(
"agents.agent",
version="0.10.2",
salt_ver="1.0.3",
_quantity=53,
)
ret = update_salt_minion_task.s().apply()
self.assertEqual(ret.status, "SUCCESS")
salt_api_async.assert_not_called()
[i.delete() for i in self.agents]
salt_api_async.reset_mock()
# test agents already on latest salt ver
self.agents = baker.make_recipe(
"agents.agent",
version=settings.LATEST_AGENT_VER,
salt_ver=settings.LATEST_SALT_VER,
_quantity=53,
)
ret = update_salt_minion_task.s().apply()
self.assertEqual(ret.status, "SUCCESS")
salt_api_async.assert_not_called()
@patch("agents.models.Agent.salt_api_async")
def test_auto_self_agent_update_task(self, salt_api_async):
@patch("agents.tasks.sleep", return_value=None)
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
# test 64bit golang agent
self.agent64 = baker.make_recipe(
"agents.agent",
@@ -791,7 +874,7 @@ class TestAgentViewsNew(TacticalTestCase):
version="1.0.0",
)
salt_api_async.return_value = True
ret = auto_self_agent_update_task.s(test=True).apply()
ret = auto_self_agent_update_task.s().apply()
salt_api_async.assert_called_with(
func="win_agent.do_agent_update_v2",
kwargs={
@@ -810,7 +893,7 @@ class TestAgentViewsNew(TacticalTestCase):
version="1.0.0",
)
salt_api_async.return_value = True
ret = auto_self_agent_update_task.s(test=True).apply()
ret = auto_self_agent_update_task.s().apply()
salt_api_async.assert_called_with(
func="win_agent.do_agent_update_v2",
kwargs={
@@ -828,7 +911,7 @@ class TestAgentViewsNew(TacticalTestCase):
operating_system=None,
version="1.0.0",
)
ret = auto_self_agent_update_task.s(test=True).apply()
ret = auto_self_agent_update_task.s().apply()
salt_api_async.assert_not_called()
self.agentNone.delete()
salt_api_async.reset_mock()
@@ -841,7 +924,7 @@ class TestAgentViewsNew(TacticalTestCase):
)
self.coresettings.agent_auto_update = False
self.coresettings.save(update_fields=["agent_auto_update"])
ret = auto_self_agent_update_task.s(test=True).apply()
ret = auto_self_agent_update_task.s().apply()
salt_api_async.assert_not_called()
# reset core settings
@@ -857,7 +940,7 @@ class TestAgentViewsNew(TacticalTestCase):
version="0.11.1",
)
salt_api_async.return_value = True
ret = auto_self_agent_update_task.s(test=True).apply()
ret = auto_self_agent_update_task.s().apply()
salt_api_async.assert_called_with(
func="win_agent.do_agent_update_v2",
kwargs={
@@ -876,7 +959,7 @@ class TestAgentViewsNew(TacticalTestCase):
version="0.11.1",
)
salt_api_async.return_value = True
ret = auto_self_agent_update_task.s(test=True).apply()
ret = auto_self_agent_update_task.s().apply()
salt_api_async.assert_called_with(
func="win_agent.do_agent_update_v2",
kwargs={
@@ -884,4 +967,4 @@ class TestAgentViewsNew(TacticalTestCase):
"url": OLD_32_PY_AGENT,
},
)
self.assertEqual(ret.status, "SUCCESS")
self.assertEqual(ret.status, "SUCCESS")

View File

@@ -5,8 +5,8 @@ urlpatterns = [
path("listagents/", views.AgentsTableList.as_view()),
path("listagentsnodetail/", views.list_agents_no_detail),
path("<int:pk>/agenteditdetails/", views.agent_edit_details),
path("byclient/<client>/", views.by_client),
path("bysite/<client>/<site>/", views.by_site),
path("byclient/<int:clientpk>/", views.by_client),
path("bysite/<int:sitepk>/", views.by_site),
path("overdueaction/", views.overdue_action),
path("sendrawcmd/", views.send_raw_cmd),
path("<pk>/agentdetail/", views.agent_detail),
@@ -25,7 +25,6 @@ urlpatterns = [
path("<int:pk>/ping/", views.ping),
path("recover/", views.recover),
path("runscript/", views.run_script),
path("<int:pk>/restartmesh/", views.restart_mesh),
path("<int:pk>/recovermesh/", views.recover_mesh),
path("<int:pk>/notes/", views.GetAddNotes.as_view()),
path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),

View File

@@ -1,9 +1,7 @@
import asyncio
from loguru import logger
import os
import subprocess
import zlib
import json
import base64
import pytz
import datetime as dt
from packaging import version as pyver
@@ -18,9 +16,6 @@ from rest_framework.response import Response
from rest_framework import status, generics
from .models import Agent, AgentOutage, RecoveryAction, Note
from winupdate.models import WinUpdatePolicy
from clients.models import Client, Site
from accounts.models import User
from core.models import CoreSettings
from scripts.models import Script
from logs.models import AuditLog
@@ -37,9 +32,9 @@ from winupdate.serializers import WinUpdatePolicySerializer
from .tasks import uninstall_agent_task, send_agent_update_task
from winupdate.tasks import bulk_check_for_updates_task
from scripts.tasks import run_script_bg_task, run_bulk_script_task
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from tacticalrmm.utils import notify_error
from tacticalrmm.utils import notify_error, reload_nats
logger.configure(**settings.LOG_CONFIG)
@@ -66,33 +61,32 @@ def update_agents(request):
@api_view()
def ping(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=5, func="test.ping")
if r == "timeout" or r == "error":
return Response({"name": agent.hostname, "status": "offline"})
if isinstance(r, bool) and r:
return Response({"name": agent.hostname, "status": "online"})
status = "offline"
if agent.has_nats:
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
if r == "pong":
status = "online"
else:
return Response({"name": agent.hostname, "status": "offline"})
r = agent.salt_api_cmd(timeout=5, func="test.ping")
if isinstance(r, bool) and r:
status = "online"
return Response({"name": agent.hostname, "status": status})
@api_view(["DELETE"])
def uninstall(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
# just in case agent-user gets deleted accidentally from django-admin

# we can still remove the agent
try:
user = User.objects.get(username=agent.agent_id)
user.delete()
except Exception as e:
logger.warning(e)
if agent.has_nats:
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
salt_id = agent.salt_id
name = agent.hostname
has_nats = agent.has_nats
agent.delete()
reload_nats()
uninstall_agent_task.delay(salt_id)
uninstall_agent_task.delay(salt_id, has_nats)
return Response(f"{name} will now be uninstalled.")
@@ -103,7 +97,7 @@ def edit_agent(request):
a_serializer.is_valid(raise_exception=True)
a_serializer.save()
policy = WinUpdatePolicy.objects.get(agent=agent)
policy = agent.winupdatepolicy.get()
p_serializer = WinUpdatePolicySerializer(
instance=policy, data=request.data["winupdatepolicy"][0]
)
@@ -145,6 +139,8 @@ def meshcentral(request, pk):
"file": file,
"webrdp": webrdp,
"status": agent.status,
"client": agent.client.name,
"site": agent.site.name,
}
return Response(ret)
@@ -158,12 +154,11 @@ def agent_detail(request, pk):
@api_view()
def get_processes(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=20, func="win_agent.get_procs")
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error":
return notify_error("Something went wrong")
return Response(r)
@@ -171,15 +166,17 @@ def get_processes(request, pk):
@api_view()
def kill_proc(request, pk, pid):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=25, func="ps.kill_pid", arg=int(pid))
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
r = asyncio.run(
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
)
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error":
return notify_error("Something went wrong")
if isinstance(r, bool) and not r:
return notify_error("Unable to kill the process")
elif r != "ok":
return notify_error(r)
return Response("ok")
@@ -187,33 +184,32 @@ def kill_proc(request, pk, pid):
@api_view()
def get_event_log(request, pk, logtype, days):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(
timeout=30,
func="win_agent.get_eventlog",
arg=[logtype, int(days)],
)
if r == "timeout" or r == "error":
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
data = {
"func": "eventlog",
"timeout": 30,
"payload": {
"logname": logtype,
"days": str(days),
},
}
r = asyncio.run(agent.nats_cmd(data, timeout=32))
if r == "timeout":
return notify_error("Unable to contact the agent")
return Response(json.loads(zlib.decompress(base64.b64decode(r["wineventlog"]))))
return Response(r)
@api_view(["POST"])
def power_action(request):
pk = request.data["pk"]
action = request.data["action"]
agent = get_object_or_404(Agent, pk=pk)
if action == "rebootnow":
logger.info(f"{agent.hostname} was scheduled for immediate reboot")
r = agent.salt_api_cmd(
timeout=30,
func="system.reboot",
arg=3,
kwargs={"in_seconds": True},
)
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
return notify_error("Unable to contact the agent")
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
if request.data["action"] == "rebootnow":
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
if r != "ok":
return notify_error("Unable to contact the agent")
return Response("ok")
@@ -221,21 +217,21 @@ def power_action(request):
@api_view(["POST"])
def send_raw_cmd(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
r = agent.salt_api_cmd(
timeout=request.data["timeout"],
func="cmd.run",
kwargs={
"cmd": request.data["cmd"],
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
timeout = int(request.data["timeout"])
data = {
"func": "rawcmd",
"timeout": timeout,
"payload": {
"command": request.data["cmd"],
"shell": request.data["shell"],
"timeout": request.data["timeout"],
},
)
}
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error" or not r:
return notify_error("Something went wrong")
AuditLog.audit_raw_command(
username=request.user.username,
@@ -244,29 +240,31 @@ def send_raw_cmd(request):
shell=request.data["shell"],
)
logger.info(f"The command {request.data['cmd']} was sent on agent {agent.hostname}")
return Response(r)
class AgentsTableList(generics.ListAPIView):
queryset = Agent.objects.prefetch_related("agentchecks").only(
"pk",
"hostname",
"agent_id",
"client",
"site",
"monitoring_type",
"description",
"needs_reboot",
"overdue_text_alert",
"overdue_email_alert",
"overdue_time",
"last_seen",
"boot_time",
"logged_in_username",
"last_logged_in_user",
"time_zone",
"maintenance_mode",
queryset = (
Agent.objects.select_related("site")
.prefetch_related("agentchecks")
.only(
"pk",
"hostname",
"agent_id",
"site",
"monitoring_type",
"description",
"needs_reboot",
"overdue_text_alert",
"overdue_email_alert",
"overdue_time",
"last_seen",
"boot_time",
"logged_in_username",
"last_logged_in_user",
"time_zone",
"maintenance_mode",
)
)
serializer_class = AgentTableSerializer
@@ -281,7 +279,7 @@ class AgentsTableList(generics.ListAPIView):
@api_view()
def list_agents_no_detail(request):
agents = Agent.objects.all()
agents = Agent.objects.select_related("site").only("pk", "hostname", "site")
return Response(AgentHostnameSerializer(agents, many=True).data)
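The rewritten queryset above mirrors the AgentsTableList change: select_related("site") joins the site row into the same query and only() restricts the agent columns, so serializing hostnames no longer issues one extra query per agent. A minimal illustration of the difference, assuming standard Django ORM behavior:

# N+1 pattern: one query for the agents, then one more per agent the first time .site is touched
for agent in Agent.objects.all():
    print(agent.hostname, agent.site.name)

# single query: the site row is joined in, so agent.site.name needs no extra lookup
for agent in Agent.objects.select_related("site").only("pk", "hostname", "site"):
    print(agent.hostname, agent.site.name)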
@@ -292,15 +290,15 @@ def agent_edit_details(request, pk):
@api_view()
def by_client(request, client):
def by_client(request, clientpk):
agents = (
Agent.objects.filter(client=client)
Agent.objects.select_related("site")
.filter(site__client_id=clientpk)
.prefetch_related("agentchecks")
.only(
"pk",
"hostname",
"agent_id",
"client",
"site",
"monitoring_type",
"description",
@@ -321,15 +319,15 @@ def by_client(request, client):
@api_view()
def by_site(request, client, site):
def by_site(request, sitepk):
agents = (
Agent.objects.filter(client=client, site=site)
Agent.objects.filter(site_id=sitepk)
.select_related("site")
.prefetch_related("agentchecks")
.only(
"pk",
"hostname",
"agent_id",
"client",
"site",
"monitoring_type",
"description",
@@ -398,8 +396,8 @@ def reboot_later(request):
def install_agent(request):
from knox.models import AuthToken
client = get_object_or_404(Client, client=request.data["client"])
site = get_object_or_404(Site, client=client, site=request.data["site"])
client_id = request.data["client"]
site_id = request.data["site"]
version = settings.LATEST_AGENT_VER
arch = request.data["arch"]
@@ -454,8 +452,8 @@ def install_agent(request):
"build",
f"-ldflags=\"-X 'main.Inno={inno}'",
f"-X 'main.Api={api}'",
f"-X 'main.Client={client.pk}'",
f"-X 'main.Site={site.pk}'",
f"-X 'main.Client={client_id}'",
f"-X 'main.Site={site_id}'",
f"-X 'main.Atype={atype}'",
f"-X 'main.Rdp={rdp}'",
f"-X 'main.Ping={ping}'",
@@ -552,7 +550,7 @@ def install_agent(request):
"&&",
"timeout",
"/t",
"20",
"10",
"/nobreak",
">",
"NUL",
@@ -563,9 +561,9 @@ def install_agent(request):
"--api",
request.data["api"],
"--client-id",
client.pk,
client_id,
"--site-id",
site.pk,
site_id,
"--agent-type",
request.data["agenttype"],
"--auth",
@@ -597,8 +595,8 @@ def install_agent(request):
replace_dict = {
"innosetupchange": inno,
"clientchange": str(client.pk),
"sitechange": str(site.pk),
"clientchange": str(client_id),
"sitechange": str(site_id),
"apichange": request.data["api"],
"atypechange": request.data["agenttype"],
"powerchange": str(request.data["power"]),
@@ -638,35 +636,60 @@ def install_agent(request):
@api_view(["POST"])
def recover(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
mode = request.data["mode"]
if pyver.parse(agent.version) <= pyver.parse("0.9.5"):
return notify_error("Only available in agent version greater than 0.9.5")
if not agent.has_nats:
if mode == "tacagent" or mode == "checkrunner" or mode == "rpc":
return notify_error("Requires agent version 1.1.0 or greater")
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
if agent.has_nats:
if (
mode == "tacagent"
or mode == "checkrunner"
or mode == "salt"
or mode == "mesh"
):
data = {"func": "recover", "payload": {"mode": mode}}
r = asyncio.run(agent.nats_cmd(data, timeout=10))
if r == "ok":
return Response("Successfully completed recovery")
if agent.recoveryactions.filter(last_run=None).exists():
return notify_error(
"A recovery action is currently pending. Please wait for the next agent check-in."
)
if request.data["mode"] == "command" and not request.data["cmd"]:
if mode == "command" and not request.data["cmd"]:
return notify_error("Command is required")
# if we've made it this far and realtime recovery didn't work,
# tacagent service is the fallback recovery so we obviously can't use that to recover itself if it's down
if mode == "tacagent":
return notify_error(
"Requires RPC service to be functional. Please recover that first"
)
# we should only get here if all other methods fail
RecoveryAction(
agent=agent,
mode=request.data["mode"],
command=request.data["cmd"] if request.data["mode"] == "command" else None,
mode=mode,
command=request.data["cmd"] if mode == "command" else None,
).save()
return Response(f"Recovery will be attempted on the agent's next check-in")
return Response("Recovery will be attempted on the agent's next check-in")
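For reference, the recover view above reads three keys from the request body: pk, mode, and (only for the command mode) cmd. A hypothetical payload the frontend might send, shown as a plain dict; the field names come straight from the view, the values are illustrative:

payload = {
    "pk": 42,        # agent primary key, illustrative value
    "mode": "mesh",  # mode names seen in this view: tacagent, checkrunner, rpc, salt, mesh, command
    "cmd": None,     # required only when mode == "command"
}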
@api_view(["POST"])
def run_script(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
script = get_object_or_404(Script, pk=request.data["scriptPK"])
output = request.data["output"]
args = request.data["args"]
req_timeout = int(request.data["timeout"]) + 3
AuditLog.audit_script_run(
@@ -675,75 +698,33 @@ def run_script(request):
script=script.name,
)
data = {
"func": "runscript",
"timeout": request.data["timeout"],
"script_args": request.data["args"],
"payload": {
"code": script.code,
"shell": script.shell,
},
}
if output == "wait":
r = agent.salt_api_cmd(
timeout=req_timeout,
func="win_agent.run_script",
kwargs={
"filepath": script.filepath,
"filename": script.filename,
"shell": script.shell,
"timeout": request.data["timeout"],
"args": args,
},
)
if isinstance(r, dict):
if r["stdout"]:
return Response(r["stdout"])
elif r["stderr"]:
return Response(r["stderr"])
else:
try:
r["retcode"]
except KeyError:
return notify_error("Something went wrong")
return Response(f"Return code: {r['retcode']}")
else:
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error":
return notify_error("Something went wrong")
else:
return notify_error(str(r))
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
return Response(r)
else:
data = {
"agentpk": agent.pk,
"scriptpk": script.pk,
"timeout": request.data["timeout"],
"args": args,
}
run_script_bg_task.delay(data)
asyncio.run(agent.nats_cmd(data, wait=False))
return Response(f"{script.name} will now be run on {agent.hostname}")
@api_view()
def restart_mesh(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(func="service.restart", arg="mesh agent", timeout=30)
if r == "timeout" or r == "error":
return notify_error("Unable to contact the agent")
elif isinstance(r, bool) and r:
return Response(f"Restarted Mesh Agent on {agent.hostname}")
else:
return notify_error(f"Failed to restart the Mesh Agent on {agent.hostname}")
@api_view()
def recover_mesh(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(
timeout=60,
func="cmd.run",
kwargs={
"cmd": r'"C:\\Program Files\\TacticalAgent\\tacticalrmm.exe" -m recovermesh',
"timeout": 55,
},
)
if r == "timeout" or r == "error":
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
data = {"func": "recover", "payload": {"mode": "mesh"}}
r = asyncio.run(agent.nats_cmd(data, timeout=45))
if r != "ok":
return notify_error("Unable to contact the agent")
return Response(f"Repaired mesh agent on {agent.hostname}")
@@ -807,76 +788,44 @@ def bulk(request):
return notify_error("Must select at least 1 agent")
if request.data["target"] == "client":
client = get_object_or_404(Client, client=request.data["client"])
agents = Agent.objects.filter(client=client.client)
q = Agent.objects.filter(site__client_id=request.data["client"])
elif request.data["target"] == "site":
client = get_object_or_404(Client, client=request.data["client"])
site = (
Site.objects.filter(client=client).filter(site=request.data["site"]).get()
)
agents = Agent.objects.filter(client=client.client).filter(site=site.site)
q = Agent.objects.filter(site_id=request.data["site"])
elif request.data["target"] == "agents":
agents = Agent.objects.filter(pk__in=request.data["agentPKs"])
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
elif request.data["target"] == "all":
agents = Agent.objects.all()
q = Agent.objects.all()
else:
return notify_error("Something went wrong")
minions = [agent.salt_id for agent in agents]
minions = [agent.salt_id for agent in q]
agents = [agent.pk for agent in q]
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
if request.data["mode"] == "command":
r = Agent.salt_batch_async(
minions=minions,
func="cmd.run_bg",
kwargs={
"cmd": request.data["cmd"],
"shell": request.data["shell"],
"timeout": request.data["timeout"],
},
handle_bulk_command_task.delay(
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
)
if r == "timeout":
return notify_error("Salt API not running")
return Response(f"Command will now be run on {len(minions)} agents")
return Response(f"Command will now be run on {len(agents)} agents")
elif request.data["mode"] == "script":
script = get_object_or_404(Script, pk=request.data["scriptPK"])
if script.shell == "python":
r = Agent.salt_batch_async(
minions=minions,
func="win_agent.run_script",
kwargs={
"filepath": script.filepath,
"filename": script.filename,
"shell": script.shell,
"timeout": request.data["timeout"],
"args": request.data["args"],
"bg": True,
},
)
if r == "timeout":
return notify_error("Salt API not running")
else:
data = {
"minions": minions,
"scriptpk": script.pk,
"timeout": request.data["timeout"],
"args": request.data["args"],
}
run_bulk_script_task.delay(data)
return Response(f"{script.name} will now be run on {len(minions)} agents")
handle_bulk_script_task.delay(
script.pk, agents, request.data["args"], request.data["timeout"]
)
return Response(f"{script.name} will now be run on {len(agents)} agents")
elif request.data["mode"] == "install":
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
if r == "timeout":
return notify_error("Salt API not running")
return Response(
f"Pending updates will now be installed on {len(minions)} agents"
f"Pending updates will now be installed on {len(agents)} agents"
)
elif request.data["mode"] == "scan":
bulk_check_for_updates_task.delay(minions=minions)
return Response(f"Patch status scan will now run on {len(minions)} agents")
return Response(f"Patch status scan will now run on {len(agents)} agents")
return notify_error("Something went wrong")
@@ -904,14 +853,12 @@ def agent_counts(request):
@api_view(["POST"])
def agent_maintenance(request):
if request.data["type"] == "Client":
client = Client.objects.get(pk=request.data["id"])
Agent.objects.filter(client=client.client).update(
Agent.objects.filter(site__client_id=request.data["id"]).update(
maintenance_mode=request.data["action"]
)
elif request.data["type"] == "Site":
site = Site.objects.get(pk=request.data["id"])
Agent.objects.filter(client=site.client.client, site=site.site).update(
Agent.objects.filter(site_id=request.data["id"]).update(
maintenance_mode=request.data["action"]
)

View File

@@ -1,17 +1,20 @@
from tacticalrmm.test import TacticalTestCase
from unittest.mock import patch
from model_bakery import baker
from itertools import cycle
class TestAPIv2(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
self.agent_setup()
@patch("agents.models.Agent.salt_api_cmd")
def test_sync_modules(self, mock_ret):
# setup data
agent = baker.make_recipe("agents.agent")
url = "/api/v2/saltminion/"
payload = {"agent_id": self.agent.agent_id}
payload = {"agent_id": agent.agent_id}
mock_ret.return_value = "error"
r = self.client.patch(url, payload, format="json")

View File

@@ -2,11 +2,18 @@ import os
import json
from django.conf import settings
from tacticalrmm.test import BaseTestCase
from tacticalrmm.test import TacticalTestCase
from unittest.mock import patch
from model_bakery import baker
from itertools import cycle
class TestAPIv3(BaseTestCase):
class TestAPIv3(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
self.agent = baker.make_recipe("agents.agent")
def test_get_checks(self):
url = f"/api/v3/{self.agent.agent_id}/checkrunner/"

View File

@@ -1,3 +1,4 @@
import asyncio
import os
import requests
from loguru import logger
@@ -6,6 +7,7 @@ from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from django.http import HttpResponse
from rest_framework import serializers
from rest_framework.response import Response
from rest_framework.views import APIView
@@ -16,9 +18,7 @@ from rest_framework.authtoken.models import Token
from agents.models import Agent
from checks.models import Check
from autotasks.models import AutomatedTask
from winupdate.models import WinUpdate
from accounts.models import User
from clients.models import Client, Site
from winupdate.models import WinUpdatePolicy
from checks.serializers import CheckRunnerGetSerializerV3
from agents.serializers import WinAgentSerializer
@@ -34,7 +34,7 @@ from agents.tasks import (
from winupdate.tasks import check_for_updates_task
from software.tasks import get_installed_software, install_chocolatey
from checks.utils import bytes2human
from tacticalrmm.utils import notify_error
from tacticalrmm.utils import notify_error, reload_nats
logger.configure(**settings.LOG_CONFIG)
@@ -98,6 +98,17 @@ class Hello(APIView):
recovery.save(update_fields=["last_run"])
return Response(recovery.send())
# handle agent update
if agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).exists():
update = agent.pendingactions.filter(
action_type="agentupdate", status="pending"
).last()
update.status = "completed"
update.save(update_fields=["status"])
return Response(update.details)
# get any pending actions
if agent.pendingactions.filter(status="pending").exists():
agent.handle_pending_actions()
@@ -134,8 +145,6 @@ class CheckRunner(APIView):
def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
agent.last_seen = djangotime.now()
agent.save(update_fields=["last_seen"])
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
ret = {
@@ -146,10 +155,23 @@ class CheckRunner(APIView):
return Response(ret)
def patch(self, request):
from logs.models import AuditLog
check = get_object_or_404(Check, pk=request.data["id"])
check.last_run = djangotime.now()
check.save(update_fields=["last_run"])
status = check.handle_checkv2(request.data)
# create audit entry
AuditLog.objects.create(
username=check.agent.hostname,
agent=check.agent.hostname,
object_type="agent",
action="check_run",
message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}",
after_value=Check.serialize(check),
)
return Response(status)
@@ -167,6 +189,8 @@ class TaskRunner(APIView):
return Response(TaskGOGetSerializer(task).data)
def patch(self, request, pk, agentid):
from logs.models import AuditLog
agent = get_object_or_404(Agent, agent_id=agentid)
task = get_object_or_404(AutomatedTask, pk=pk)
@@ -175,6 +199,17 @@ class TaskRunner(APIView):
)
serializer.is_valid(raise_exception=True)
serializer.save(last_run=djangotime.now())
new_task = AutomatedTask.objects.get(pk=task.pk)
AuditLog.objects.create(
username=agent.hostname,
agent=agent.hostname,
object_type="agent",
action="task_run",
message=f"Scheduled Task {task.name} was run on {agent.hostname}",
after_value=AutomatedTask.serialize(new_task),
)
return Response("ok")
@@ -308,21 +343,16 @@ class WinUpdater(APIView):
agent.save(update_fields=["needs_reboot"])
if reboot:
r = agent.salt_api_cmd(
timeout=15,
func="system.reboot",
arg=7,
kwargs={"in_seconds": True},
)
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
)
if agent.has_nats:
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
else:
logger.info(
f"{agent.hostname} is rebooting after updates were installed."
agent.salt_api_async(
func="system.reboot",
arg=7,
kwargs={"in_seconds": True},
)
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
else:
check_for_updates_task.apply_async(
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
@@ -387,31 +417,9 @@ class MeshExe(APIView):
class NewAgent(APIView):
""" For the installer """
def post(self, request):
"""
Creates and returns the agents auth token
which is stored in the agent's local db
and used to authenticate every agent request
"""
from logs.models import AuditLog
if "agent_id" not in request.data:
return notify_error("Invalid payload")
agentid = request.data["agent_id"]
if Agent.objects.filter(agent_id=agentid).exists():
return notify_error(
"Agent already exists. Remove old agent first if trying to re-install"
)
user = User.objects.create_user(
username=agentid, password=User.objects.make_random_password(60)
)
token = Token.objects.create(user=user)
return Response({"token": token.key})
def patch(self, request):
""" Creates the agent """
if Agent.objects.filter(agent_id=request.data["agent_id"]).exists():
@@ -419,14 +427,10 @@ class NewAgent(APIView):
"Agent already exists. Remove old agent first if trying to re-install"
)
client = get_object_or_404(Client, pk=int(request.data["client"]))
site = get_object_or_404(Site, pk=int(request.data["site"]))
agent = Agent(
agent_id=request.data["agent_id"],
hostname=request.data["hostname"],
client=client.client,
site=site.site,
site_id=int(request.data["site"]),
monitoring_type=request.data["monitoring_type"],
description=request.data["description"],
mesh_node_id=request.data["mesh_node_id"],
@@ -436,13 +440,39 @@ class NewAgent(APIView):
agent.salt_id = f"{agent.hostname}-{agent.pk}"
agent.save(update_fields=["salt_id"])
user = User.objects.create_user(
username=request.data["agent_id"],
agent=agent,
password=User.objects.make_random_password(60),
)
token = Token.objects.create(user=user)
if agent.monitoring_type == "workstation":
WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
else:
WinUpdatePolicy(agent=agent).save()
reload_nats()
# Generate policies for new agent
agent.generate_checks_from_policies()
agent.generate_tasks_from_policies()
return Response({"pk": agent.pk, "saltid": f"{agent.hostname}-{agent.pk}"})
# create agent install audit record
AuditLog.objects.create(
username=request.user,
agent=agent.hostname,
object_type="agent",
action="agent_install",
message=f"{request.user} installed new agent {agent.hostname}",
after_value=Agent.serialize(agent),
)
return Response(
{
"pk": agent.pk,
"saltid": f"{agent.hostname}-{agent.pk}",
"token": token.key,
}
)

View File

@@ -1,6 +1,5 @@
from django.contrib import admin
from .models import Policy, PolicyExclusions
from .models import Policy
admin.site.register(Policy)
admin.site.register(PolicyExclusions)

View File

@@ -0,0 +1,16 @@
# Generated by Django 3.1.2 on 2020-11-02 19:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('automation', '0005_auto_20200922_1344'),
]
operations = [
migrations.DeleteModel(
name='PolicyExclusions',
),
]

View File

@@ -32,16 +32,15 @@ class Policy(BaseAuditModel):
filtered_agents_pks = Policy.objects.none()
for site in explicit_sites:
if site.client not in explicit_clients:
filtered_agents_pks |= Agent.objects.filter(
client=site.client.client,
site=site.site,
monitoring_type=mon_type,
).values_list("pk", flat=True)
filtered_agents_pks |= Agent.objects.filter(
site__in=[
site for site in explicit_sites if site.client not in explicit_clients
],
monitoring_type=mon_type,
).values_list("pk", flat=True)
filtered_agents_pks |= Agent.objects.filter(
client__in=[client.client for client in explicit_clients],
site__client__in=[client for client in explicit_clients],
monitoring_type=mon_type,
).values_list("pk", flat=True)
@@ -68,8 +67,8 @@ class Policy(BaseAuditModel):
]
# Get policies applied to agent and agent site and client
client = Client.objects.get(client=agent.client)
site = Site.objects.filter(client=client).get(site=agent.site)
client = agent.client
site = agent.site
default_policy = None
client_policy = None
@@ -121,8 +120,8 @@ class Policy(BaseAuditModel):
]
# Get policies applied to agent and agent site and client
client = Client.objects.get(client=agent.client)
site = Site.objects.filter(client=client).get(site=agent.site)
client = agent.client
site = agent.site
default_policy = None
client_policy = None
@@ -300,11 +299,3 @@ class Policy(BaseAuditModel):
if tasks:
for task in tasks:
task.create_policy_task(agent)
class PolicyExclusions(models.Model):
policy = models.ForeignKey(
Policy, related_name="exclusions", on_delete=models.CASCADE
)
agents = models.ManyToManyField(Agent, related_name="policy_exclusions")
sites = models.ManyToManyField(Site, related_name="policy_exclusions")

View File

@@ -5,6 +5,9 @@ from rest_framework.serializers import (
ReadOnlyField,
)
from clients.serializers import ClientSerializer, SiteSerializer
from agents.serializers import AgentHostnameSerializer
from .models import Policy
from agents.models import Agent
from autotasks.models import AutomatedTask
@@ -21,11 +24,11 @@ class PolicySerializer(ModelSerializer):
class PolicyTableSerializer(ModelSerializer):
server_clients = StringRelatedField(many=True, read_only=True)
server_sites = StringRelatedField(many=True, read_only=True)
workstation_clients = StringRelatedField(many=True, read_only=True)
workstation_sites = StringRelatedField(many=True, read_only=True)
agents = StringRelatedField(many=True, read_only=True)
server_clients = ClientSerializer(many=True, read_only=True)
server_sites = SiteSerializer(many=True, read_only=True)
workstation_clients = ClientSerializer(many=True, read_only=True)
workstation_sites = SiteSerializer(many=True, read_only=True)
agents = AgentHostnameSerializer(many=True, read_only=True)
default_server_policy = ReadOnlyField(source="is_default_server_policy")
default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
agents_count = SerializerMethodField(read_only=True)
@@ -43,7 +46,7 @@ class PolicyTableSerializer(ModelSerializer):
class PolicyOverviewSerializer(ModelSerializer):
class Meta:
model = Client
fields = ("pk", "client", "sites", "workstation_policy", "server_policy")
fields = ("pk", "name", "sites", "workstation_policy", "server_policy")
depth = 2

View File

@@ -71,8 +71,8 @@ class TestPolicyViews(TacticalTestCase):
# create policy with tasks and checks
policy = baker.make("automation.Policy")
checks = self.create_checks(policy=policy)
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
self.create_checks(policy=policy)
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
# test copy tasks and checks to another policy
data = {
@@ -152,7 +152,7 @@ class TestPolicyViews(TacticalTestCase):
# create policy with tasks
policy = baker.make("automation.Policy")
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
url = f"/automation/{policy.pk}/policyautomatedtasks/"
resp = self.client.get(url, format="json")
@@ -202,6 +202,8 @@ class TestPolicyViews(TacticalTestCase):
self.check_not_authenticated("patch", url)
def test_policy_overview(self):
from clients.models import Client
url = "/automation/policies/overview/"
policies = baker.make(
@@ -213,7 +215,7 @@ class TestPolicyViews(TacticalTestCase):
workstation_policy=cycle(policies),
_quantity=5,
)
sites = baker.make(
baker.make(
"clients.Site",
client=cycle(clients),
server_policy=cycle(policies),
@@ -221,8 +223,9 @@ class TestPolicyViews(TacticalTestCase):
_quantity=4,
)
sites = baker.make("clients.Site", client=cycle(clients), _quantity=3)
baker.make("clients.Site", client=cycle(clients), _quantity=3)
resp = self.client.get(url, format="json")
clients = Client.objects.all()
serializer = PolicyOverviewSerializer(clients, many=True)
self.assertEqual(resp.status_code, 200)
@@ -256,31 +259,31 @@ class TestPolicyViews(TacticalTestCase):
# data setup
policy = baker.make("automation.Policy")
client = baker.make("clients.Client", client="Test Client")
site = baker.make("clients.Site", client=client, site="Test Site")
agent = baker.make_recipe("agents.agent", client=client.client, site=site.site)
client = baker.make("clients.Client")
site = baker.make("clients.Site", client=client)
agent = baker.make_recipe("agents.agent", site=site)
# test add client to policy data
client_server_payload = {
"type": "client",
"pk": client.pk,
"pk": agent.client.pk,
"server_policy": policy.pk,
}
client_workstation_payload = {
"type": "client",
"pk": client.pk,
"pk": agent.client.pk,
"workstation_policy": policy.pk,
}
# test add site to policy data
site_server_payload = {
"type": "site",
"pk": site.pk,
"pk": agent.site.pk,
"server_policy": policy.pk,
}
site_workstation_payload = {
"type": "site",
"pk": site.pk,
"pk": agent.site.pk,
"workstation_policy": policy.pk,
}
@@ -293,7 +296,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
@@ -306,7 +309,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -319,7 +322,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.id},
mon_type="server",
clear=True,
create_tasks=True,
@@ -332,7 +335,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -391,7 +394,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
@@ -404,7 +407,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -417,7 +420,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.id},
mon_type="server",
clear=True,
create_tasks=True,
@@ -430,7 +433,7 @@ class TestPolicyViews(TacticalTestCase):
# called because the relation changed
mock_checks_location_task.assert_called_with(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -471,14 +474,14 @@ class TestPolicyViews(TacticalTestCase):
self.check_not_authenticated("post", url)
def test_relation_by_type(self):
def test_get_relation_by_type(self):
url = f"/automation/related/"
# data setup
policy = baker.make("automation.Policy")
client = baker.make("clients.Client", client="Test Client")
site = baker.make("clients.Site", client=client, site="Test Site")
agent = baker.make_recipe("agents.agent", client=client.client, site=site.site)
client = baker.make("clients.Client", workstation_policy=policy)
site = baker.make("clients.Site", server_policy=policy)
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
client_payload = {"type": "client", "pk": client.pk}
@@ -621,43 +624,38 @@ class TestPolicyViews(TacticalTestCase):
"reprocess_failed_inherit": True,
}
# create agents in sites
clients = baker.make("clients.Client", client=seq("Client"), _quantity=3)
sites = baker.make(
"clients.Site", client=cycle(clients), site=seq("Site"), _quantity=6
)
clients = baker.make("clients.Client", _quantity=6)
sites = baker.make("clients.Site", client=cycle(clients), _quantity=10)
agents = baker.make_recipe(
"agents.agent",
client=cycle([x.client for x in clients]),
site=cycle([x.site for x in sites]),
site=cycle(sites),
_quantity=6,
)
# create patch policies
patch_policies = baker.make_recipe(
baker.make_recipe(
"winupdate.winupdate_approve", agent=cycle(agents), _quantity=6
)
# test reset agents in site
data = {"client": clients[0].client, "site": "Site0"}
data = {"site": sites[0].id}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
agents = Agent.objects.filter(client=clients[0].client, site="Site0")
agents = Agent.objects.filter(site=sites[0])
for agent in agents:
for k, v in inherit_fields.items():
self.assertEqual(getattr(agent.winupdatepolicy.get(), k), v)
# test reset agents in client
data = {"client": clients[1].client}
data = {"client": clients[1].id}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
agents = Agent.objects.filter(client=clients[1].client)
agents = Agent.objects.filter(site__client=clients[1])
for agent in agents:
for k, v in inherit_fields.items():
@@ -703,40 +701,24 @@ class TestPolicyTasks(TacticalTestCase):
def test_policy_related(self):
# Get Site and Client from an agent in list
clients = baker.make("clients.Client", client=seq("Client"), _quantity=5)
sites = baker.make(
"clients.Site", client=cycle(clients), site=seq("Site"), _quantity=25
)
clients = baker.make("clients.Client", _quantity=5)
sites = baker.make("clients.Site", client=cycle(clients), _quantity=25)
server_agents = baker.make_recipe(
"agents.server_agent",
client=cycle([x.client for x in clients]),
site=seq("Site"),
site=cycle(sites),
_quantity=25,
)
workstation_agents = baker.make_recipe(
"agents.workstation_agent",
client=cycle([x.client for x in clients]),
site=seq("Site"),
site=cycle(sites),
_quantity=25,
)
server_client = clients[3]
server_site = server_client.sites.all()[3]
workstation_client = clients[1]
workstation_site = server_client.sites.all()[2]
server_agent = baker.make_recipe(
"agents.server_agent", client=server_client.client, site=server_site.site
)
workstation_agent = baker.make_recipe(
"agents.workstation_agent",
client=workstation_client.client,
site=workstation_site.site,
)
policy = baker.make("automation.Policy", active=True)
# Add Client to Policy
policy.server_clients.add(server_client)
policy.workstation_clients.add(workstation_client)
policy.server_clients.add(server_agents[13].client)
policy.workstation_clients.add(workstation_agents[15].client)
resp = self.client.get(
f"/automation/policies/{policy.pk}/related/", format="json"
@@ -747,19 +729,19 @@ class TestPolicyTasks(TacticalTestCase):
self.assertEquals(len(resp.data["server_sites"]), 5)
self.assertEquals(len(resp.data["workstation_clients"]), 1)
self.assertEquals(len(resp.data["workstation_sites"]), 5)
self.assertEquals(len(resp.data["agents"]), 12)
self.assertEquals(len(resp.data["agents"]), 10)
# Add Site to Policy and the agents and sites length shouldn't change
policy.server_sites.add(server_site)
policy.workstation_sites.add(workstation_site)
policy.server_sites.add(server_agents[13].site)
policy.workstation_sites.add(workstation_agents[15].site)
self.assertEquals(len(resp.data["server_sites"]), 5)
self.assertEquals(len(resp.data["workstation_sites"]), 5)
self.assertEquals(len(resp.data["agents"]), 12)
self.assertEquals(len(resp.data["agents"]), 10)
# Add Agent to Policy and the agents length shouldn't change
policy.agents.add(server_agent)
policy.agents.add(workstation_agent)
self.assertEquals(len(resp.data["agents"]), 12)
policy.agents.add(server_agents[13])
policy.agents.add(workstation_agents[15])
self.assertEquals(len(resp.data["agents"]), 10)
def test_generating_agent_policy_checks(self):
from .tasks import generate_agent_checks_from_policies_task
@@ -767,9 +749,8 @@ class TestPolicyTasks(TacticalTestCase):
# setup data
policy = baker.make("automation.Policy", active=True)
checks = self.create_checks(policy=policy)
client = baker.make("clients.Client", client="Default")
baker.make("clients.Site", client=client, site="Default")
agent = baker.make_recipe("agents.agent", policy=policy)
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
# test policy assigned to agent
generate_agent_checks_from_policies_task(policy.id, clear=True)
@@ -815,9 +796,8 @@ class TestPolicyTasks(TacticalTestCase):
policy = baker.make("automation.Policy", active=True, enforced=True)
script = baker.make_recipe("scripts.script")
self.create_checks(policy=policy, script=script)
client = baker.make("clients.Client", client="Default")
baker.make("clients.Site", client=client, site="Default")
agent = baker.make_recipe("agents.agent", policy=policy)
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
self.create_checks(agent=agent, script=script)
generate_agent_checks_from_policies_task(policy.id, create_tasks=True)
@@ -839,25 +819,18 @@ class TestPolicyTasks(TacticalTestCase):
self.create_checks(policy=policy)
clients = baker.make(
"clients.Client",
client=seq("Default"),
_quantity=2,
server_policy=policy,
workstation_policy=policy,
)
baker.make(
"clients.Site", client=cycle(clients), site=seq("Default"), _quantity=4
)
server_agent = baker.make_recipe(
"agents.server_agent", client="Default1", site="Default1"
)
workstation_agent = baker.make_recipe(
"agents.workstation_agent", client="Default1", site="Default3"
)
agent1 = baker.make_recipe("agents.agent", client="Default2", site="Default2")
agent2 = baker.make_recipe("agents.agent", client="Default2", site="Default4")
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
agent1 = baker.make_recipe("agents.server_agent", site=sites[1])
agent2 = baker.make_recipe("agents.workstation_agent", site=sites[3])
generate_agent_checks_by_location_task(
{"client": "Default1", "site": "Default1"},
{"site_id": sites[0].id},
"server",
clear=True,
create_tasks=True,
@@ -871,7 +844,10 @@ class TestPolicyTasks(TacticalTestCase):
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0)
generate_agent_checks_by_location_task(
{"client": "Default1"}, "workstation", clear=True, create_tasks=True
{"site__client_id": clients[0].id},
"workstation",
clear=True,
create_tasks=True,
)
# workstation_agent should now have policy checks and the other agents should not
self.assertEqual(
@@ -888,18 +864,12 @@ class TestPolicyTasks(TacticalTestCase):
# setup data
policy = baker.make("automation.Policy", active=True)
self.create_checks(policy=policy)
clients = baker.make("clients.Client", client=seq("Default"), _quantity=2)
baker.make(
"clients.Site", client=cycle(clients), site=seq("Default"), _quantity=4
site = baker.make("clients.Site")
server_agents = baker.make_recipe("agents.server_agent", site=site, _quantity=3)
workstation_agents = baker.make_recipe(
"agents.workstation_agent", site=site, _quantity=4
)
server_agent = baker.make_recipe(
"agents.server_agent", client="Default1", site="Default1"
)
workstation_agent = baker.make_recipe(
"agents.workstation_agent", client="Default1", site="Default3"
)
agent1 = baker.make_recipe("agents.agent", client="Default2", site="Default2")
agent2 = baker.make_recipe("agents.agent", client="Default2", site="Default4")
core = CoreSettings.objects.first()
core.server_policy = policy
core.workstation_policy = policy
@@ -908,22 +878,20 @@ class TestPolicyTasks(TacticalTestCase):
generate_all_agent_checks_task("server", clear=True, create_tasks=True)
# all servers should have 7 checks
self.assertEqual(
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
)
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 7)
self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 0)
for agent in server_agents:
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
for agent in workstation_agents:
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
generate_all_agent_checks_task("workstation", clear=True, create_tasks=True)
# all agents should have 7 checks now
self.assertEqual(
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7
)
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 7)
self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 7)
for agent in server_agents:
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
for agent in workstation_agents:
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
def test_delete_policy_check(self):
from .tasks import delete_policy_check_task
@@ -931,11 +899,8 @@ class TestPolicyTasks(TacticalTestCase):
policy = baker.make("automation.Policy", active=True)
self.create_checks(policy=policy)
client = baker.make("clients.Client", client="Default", server_policy=policy)
baker.make("clients.Site", client=client, site="Default")
agent = baker.make_recipe(
"agents.server_agent", client="Default", site="Default"
)
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
agent.generate_checks_from_policies()
# make sure agent has 7 checks
@@ -960,11 +925,7 @@ class TestPolicyTasks(TacticalTestCase):
policy = baker.make("automation.Policy", active=True)
self.create_checks(policy=policy)
client = baker.make("clients.Client", client="Default", server_policy=policy)
baker.make("clients.Site", client=client, site="Default")
agent = baker.make_recipe(
"agents.server_agent", client="Default", site="Default"
)
agent = baker.make_recipe("agents.server_agent", policy=policy)
agent.generate_checks_from_policies()
# make sure agent has 7 checks
@@ -997,11 +958,8 @@ class TestPolicyTasks(TacticalTestCase):
tasks = baker.make(
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
)
client = baker.make("clients.Client", client="Default")
baker.make("clients.Site", client=client, site="Default")
agent = baker.make_recipe(
"agents.server_agent", client="Default", site="Default", policy=policy
)
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
generate_agent_tasks_from_policies_task(policy.id, clear=True)
@@ -1027,33 +985,26 @@ class TestPolicyTasks(TacticalTestCase):
# setup data
policy = baker.make("automation.Policy", active=True)
tasks = baker.make(
baker.make(
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
)
clients = baker.make(
"clients.Client",
client=seq("Default"),
_quantity=2,
server_policy=policy,
workstation_policy=policy,
)
baker.make(
"clients.Site", client=cycle(clients), site=seq("Default"), _quantity=4
)
server_agent = baker.make_recipe(
"agents.server_agent", client="Default1", site="Default1"
)
workstation_agent = baker.make_recipe(
"agents.workstation_agent", client="Default1", site="Default3"
)
agent1 = baker.make_recipe("agents.agent", client="Default2", site="Default2")
agent2 = baker.make_recipe("agents.agent", client="Default2", site="Default4")
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
agent1 = baker.make_recipe("agents.agent", site=sites[1])
agent2 = baker.make_recipe("agents.agent", site=sites[3])
generate_agent_tasks_by_location_task(
{"client": "Default1", "site": "Default1"}, "server", clear=True
{"site_id": sites[0].id}, "server", clear=True
)
# all servers in Default1 and site Default1 should have 3 tasks
# all servers in site1 and site2 should have 3 tasks
self.assertEqual(
Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 0
)
@@ -1062,7 +1013,7 @@ class TestPolicyTasks(TacticalTestCase):
self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)
generate_agent_tasks_by_location_task(
{"client": "Default1"}, "workstation", clear=True
{"site__client_id": clients[0].id}, "workstation", clear=True
)
# all workstations in Default1 should have 3 tasks
@@ -1079,11 +1030,8 @@ class TestPolicyTasks(TacticalTestCase):
policy = baker.make("automation.Policy", active=True)
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
client = baker.make("clients.Client", client="Default", server_policy=policy)
baker.make("clients.Site", client=client, site="Default")
agent = baker.make_recipe(
"agents.server_agent", client="Default", site="Default"
)
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
agent.generate_tasks_from_policies()
delete_policy_autotask_task(tasks[0].id)
@@ -1103,7 +1051,7 @@ class TestPolicyTasks(TacticalTestCase):
for task in tasks:
run_win_task.assert_any_call(task.id)
def test_updated_policy_tasks(self):
def test_update_policy_tasks(self):
from .tasks import update_policy_task_fields_task
from autotasks.models import AutomatedTask
@@ -1112,11 +1060,8 @@ class TestPolicyTasks(TacticalTestCase):
tasks = baker.make(
"autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3
)
client = baker.make("clients.Client", client="Default", server_policy=policy)
baker.make("clients.Site", client=client, site="Default")
agent = baker.make_recipe(
"agents.server_agent", client="Default", site="Default"
)
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
agent.generate_tasks_from_policies()
tasks[0].enabled = False

View File

@@ -1,4 +1,3 @@
from django.db import DataError
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
@@ -12,7 +11,7 @@ from checks.models import Check
from autotasks.models import AutomatedTask
from winupdate.models import WinUpdatePolicy
from clients.serializers import ClientSerializer, TreeSerializer
from clients.serializers import ClientSerializer, SiteSerializer
from agents.serializers import AgentHostnameSerializer
from winupdate.serializers import WinUpdatePolicySerializer
@@ -33,7 +32,6 @@ from .tasks import (
generate_agent_checks_from_policies_task,
generate_agent_checks_by_location_task,
generate_agent_tasks_from_policies_task,
generate_agent_tasks_by_location_task,
run_win_policy_autotask_task,
)
@@ -172,7 +170,7 @@ class GetRelated(APIView):
if site not in policy.server_sites.all():
filtered_server_sites.append(site)
response["server_sites"] = TreeSerializer(
response["server_sites"] = SiteSerializer(
filtered_server_sites + list(policy.server_sites.all()), many=True
).data
@@ -181,7 +179,7 @@ class GetRelated(APIView):
if site not in policy.workstation_sites.all():
filtered_workstation_sites.append(site)
response["workstation_sites"] = TreeSerializer(
response["workstation_sites"] = SiteSerializer(
filtered_workstation_sites + list(policy.workstation_sites.all()), many=True
).data
@@ -218,7 +216,7 @@ class GetRelated(APIView):
client.save()
generate_agent_checks_by_location_task.delay(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -236,7 +234,7 @@ class GetRelated(APIView):
site.workstation_policy = policy
site.save()
generate_agent_checks_by_location_task.delay(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -258,7 +256,7 @@ class GetRelated(APIView):
client.server_policy = policy
client.save()
generate_agent_checks_by_location_task.delay(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
@@ -276,7 +274,7 @@ class GetRelated(APIView):
site.server_policy = policy
site.save()
generate_agent_checks_by_location_task.delay(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.id},
mon_type="server",
clear=True,
create_tasks=True,
@@ -296,7 +294,7 @@ class GetRelated(APIView):
client.workstation_policy = None
client.save()
generate_agent_checks_by_location_task.delay(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -311,7 +309,7 @@ class GetRelated(APIView):
site.workstation_policy = None
site.save()
generate_agent_checks_by_location_task.delay(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.id},
mon_type="workstation",
clear=True,
create_tasks=True,
@@ -329,7 +327,7 @@ class GetRelated(APIView):
client.server_policy = None
client.save()
generate_agent_checks_by_location_task.delay(
location={"client": client.client},
location={"site__client_id": client.id},
mon_type="server",
clear=True,
create_tasks=True,
@@ -343,7 +341,7 @@ class GetRelated(APIView):
site.server_policy = None
site.save()
generate_agent_checks_by_location_task.delay(
location={"client": site.client.client, "site": site.site},
location={"site_id": site.pk},
mon_type="server",
clear=True,
create_tasks=True,
@@ -423,12 +421,10 @@ class UpdatePatchPolicy(APIView):
def patch(self, request):
agents = None
if "client" in request.data and "site" in request.data:
agents = Agent.objects.filter(
client=request.data["client"], site=request.data["site"]
)
elif "client" in request.data:
agents = Agent.objects.filter(client=request.data["client"])
if "client" in request.data:
agents = Agent.objects.filter(site__client_id=request.data["client"])
elif "site" in request.data:
agents = Agent.objects.filter(site_id=request.data["site"])
else:
agents = Agent.objects.all()

View File

@@ -1,3 +1,4 @@
import pytz
import random
import string
import datetime as dt
@@ -122,6 +123,15 @@ class AutomatedTask(BaseAuditModel):
days = ",".join(ret)
return f"{days} at {run_time_nice}"
@property
def last_run_as_timezone(self):
if self.last_run is not None and self.agent is not None:
return self.last_run.astimezone(
pytz.timezone(self.agent.timezone)
).strftime("%b-%d-%Y - %H:%M")
return self.last_run
@staticmethod
def generate_task_name():
chars = string.ascii_letters
@@ -137,7 +147,7 @@ class AutomatedTask(BaseAuditModel):
def create_policy_task(self, agent=None, policy=None):
from .tasks import create_win_task_schedule
# exit is neither are set or if both are set
# exit if neither are set or if both are set
if not agent and not policy or agent and policy:
return

View File

@@ -1,3 +1,4 @@
import pytz
from rest_framework import serializers
from .models import AutomatedTask
@@ -12,6 +13,7 @@ class TaskSerializer(serializers.ModelSerializer):
assigned_check = CheckSerializer(read_only=True)
schedule = serializers.ReadOnlyField()
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
class Meta:
model = AutomatedTask

View File

@@ -206,6 +206,7 @@ def delete_win_task_schedule(pk, pending_action=False):
@app.task
def run_win_task(pk):
# TODO deprecated, remove this function once salt gone
task = AutomatedTask.objects.get(pk=pk)
r = task.agent.salt_api_async(func="task.run", arg=[f"name={task.win_task_name}"])
return "ok"

View File

@@ -181,10 +181,10 @@ class TestAutotaskViews(TacticalTestCase):
self.check_not_authenticated("delete", url)
@patch("autotasks.tasks.run_win_task.delay")
def test_run_autotask(self, run_win_task):
@patch("agents.models.Agent.nats_cmd")
def test_run_autotask(self, nats_cmd):
# setup data
agent = baker.make_recipe("agents.agent")
agent = baker.make_recipe("agents.agent", version="1.1.0")
task = baker.make("autotasks.AutomatedTask", agent=agent)
# test invalid url
@@ -195,7 +195,15 @@ class TestAutotaskViews(TacticalTestCase):
url = f"/tasks/runwintask/{task.id}/"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
run_win_task.assert_called_with(task.id)
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
nats_cmd.reset_mock()
old_agent = baker.make_recipe("agents.agent", version="1.0.2")
task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
url = f"/tasks/runwintask/{task2.id}/"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
nats_cmd.assert_not_called()
self.check_not_authenticated("get", url)
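
Editor's note: the updated test patches Agent.nats_cmd instead of the old Celery task and asserts both the exact NATS payload and that pre-1.1.0 agents are rejected without a publish. A standalone illustration of asserting call arguments on a patched method (FakeAgent is a made-up stand-in, not the project's model):

from unittest.mock import patch

class FakeAgent:
    # stand-in for agents.models.Agent; only the method under test matters here
    def nats_cmd(self, data, wait=True):
        pass

with patch.object(FakeAgent, "nats_cmd") as nats_cmd:
    FakeAgent().nats_cmd({"func": "runtask", "taskpk": 1}, wait=False)
    nats_cmd.assert_called_with({"func": "runtask", "taskpk": 1}, wait=False)
    nats_cmd.reset_mock()
    nats_cmd.assert_not_called()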

View File

@@ -1,3 +1,5 @@
import asyncio
import pytz
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
@@ -9,13 +11,13 @@ from agents.models import Agent
from checks.models import Check
from scripts.models import Script
from core.models import CoreSettings
from .serializers import TaskSerializer, AutoTaskSerializer
from .tasks import (
create_win_task_schedule,
delete_win_task_schedule,
run_win_task,
enable_or_disable_win_task,
)
from tacticalrmm.utils import notify_error
@@ -68,8 +70,12 @@ class AddAutoTask(APIView):
class AutoTask(APIView):
def get(self, request, pk):
agent = Agent.objects.only("pk").get(pk=pk)
return Response(AutoTaskSerializer(agent).data)
agent = get_object_or_404(Agent, pk=pk)
ctx = {
"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone),
"agent_tz": agent.time_zone,
}
return Response(AutoTaskSerializer(agent, context=ctx).data)
def patch(self, request, pk):
from automation.tasks import update_policy_task_fields_task
@@ -108,5 +114,8 @@ class AutoTask(APIView):
@api_view()
def run_task(request, pk):
task = get_object_or_404(AutomatedTask, pk=pk)
run_win_task.delay(task.pk)
if not task.agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
return Response(f"{task.name} will now be run on {task.agent.hostname}")
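
Editor's note: run_task now refuses agents below 1.1.0 and fires the task over NATS from the synchronous view via asyncio.run(..., wait=False), i.e. publish and return without waiting for a reply. A rough self-contained sketch of that gate-and-dispatch shape; the stubbed nats_cmd below is illustrative, not the real agent method:

import asyncio

class StubAgent:
    has_nats = True  # the real property compares the agent version against 1.1.0

    async def nats_cmd(self, data, wait=True):
        # stand-in for the real NATS publish
        print("published", data, "wait:", wait)

def run_task(agent, task_pk):
    if not agent.has_nats:
        return "Requires agent version 1.1.0 or greater"
    asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": task_pk}, wait=False))
    return "task queued"

print(run_task(StubAgent(), 1))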

View File

@@ -1,8 +1,8 @@
import base64
import asyncio
import string
import os
import json
import zlib
import pytz
from statistics import mean
from django.db import models
@@ -177,6 +177,15 @@ class Check(BaseAuditModel):
if self.check_type == "cpuload" or self.check_type == "memory":
return ", ".join(str(f"{x}%") for x in self.history[-6:])
@property
def last_run_as_timezone(self):
if self.last_run is not None and self.agent is not None:
return self.last_run.astimezone(
pytz.timezone(self.agent.timezone)
).strftime("%b-%d-%Y - %H:%M")
return self.last_run
@property
def non_editable_fields(self):
return [
@@ -199,6 +208,10 @@ class Check(BaseAuditModel):
"parent_check",
"managed_by_policy",
"overriden_by_policy",
"created_by",
"created_time",
"modified_by",
"modified_time",
]
def handle_checkv2(self, data):
@@ -292,12 +305,16 @@ class Check(BaseAuditModel):
self.status = "passing"
else:
if self.agent and self.restart_if_stopped:
r = self.agent.salt_api_cmd(
func="service.restart", arg=self.svc_name, timeout=45
)
if r == "timeout" or r == "error":
nats_data = {
"func": "winsvcaction",
"payload": {"name": self.svc_name, "action": "start"},
}
r = asyncio.run(self.agent.nats_cmd(nats_data, timeout=32))
if r == "timeout" or r == "natsdown":
self.status = "failing"
elif isinstance(r, bool) and r:
elif not r["success"] and r["errormsg"]:
self.status = "failing"
elif r["success"]:
self.status = "passing"
self.more_info = f"Status RUNNING"
else:
@@ -322,8 +339,7 @@ class Check(BaseAuditModel):
eventID = self.event_id
source = self.event_source
message = self.event_message
r = json.loads(zlib.decompress(base64.b64decode(data["log"])))
r = data["log"]
for i in r:
if i["eventType"] == eventType:
@@ -518,7 +534,7 @@ class Check(BaseAuditModel):
CORE = CoreSettings.objects.first()
if self.agent:
subject = f"{self.agent.client}, {self.agent.site}, {self} Failed"
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
else:
subject = f"{self} Failed"
@@ -594,7 +610,7 @@ class Check(BaseAuditModel):
CORE = CoreSettings.objects.first()
if self.agent:
subject = f"{self.agent.client}, {self.agent.site}, {self} Failed"
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
else:
subject = f"{self} Failed"
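
Editor's note: earlier in this file the service check stops calling salt_api_cmd and instead asks the agent over NATS to start the stopped service, then branches on the structured reply. A simplified, standalone sketch of that branching; the reply shape {"success": bool, "errormsg": str} mirrors what the diff handles, and "timeout"/"natsdown" are the sentinel strings nats_cmd returns when the agent never answers:

def status_from_reply(r):
    if r == "timeout" or r == "natsdown":
        return "failing"
    if not r["success"] and r["errormsg"]:
        return "failing"
    if r["success"]:
        return "passing"
    # treating any other reply as failing is an assumption; the diff's final
    # else branch is cut off by the hunk boundary above
    return "failing"

print(status_from_reply({"success": True, "errormsg": ""}))  # passing
print(status_from_reply("natsdown"))                          # failing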

View File

@@ -18,6 +18,7 @@ class CheckSerializer(serializers.ModelSerializer):
readable_desc = serializers.ReadOnlyField()
script = ScriptSerializer(read_only=True)
assigned_task = serializers.SerializerMethodField()
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
history_info = serializers.ReadOnlyField()
## Change to return only array of tasks after 9/25/2020
@@ -47,12 +48,11 @@ class CheckSerializer(serializers.ModelSerializer):
.filter(check_type="diskspace")
.exclude(managed_by_policy=True)
)
if checks:
for check in checks:
if val["disk"] in check.disk:
raise serializers.ValidationError(
f"A disk check for Drive {val['disk']} already exists!"
)
for check in checks:
if val["disk"] in check.disk:
raise serializers.ValidationError(
f"A disk check for Drive {val['disk']} already exists!"
)
# ping checks
if check_type == "ping":
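
Editor's note: the serializer surfaces the new timezone-aware property by pointing a read-only field at it, and the duplicate-drive validation drops its redundant "if checks:" guard, since looping over an empty queryset already does nothing. A minimal DRF sketch of a property-backed field, assuming the Check model from this changeset is importable:

from rest_framework import serializers
from checks.models import Check  # assumed import from this repo

class SlimCheckSerializer(serializers.ModelSerializer):
    # serialize the model property rather than the raw last_run column
    last_run = serializers.ReadOnlyField(source="last_run_as_timezone")

    class Meta:
        model = Check
        fields = ("id", "check_type", "last_run")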

View File

@@ -56,10 +56,3 @@ def handle_check_sms_alert_task(pk):
check.save(update_fields=["text_sent"])
return "ok"
@app.task
def run_checks_task(pk):
agent = Agent.objects.get(pk=pk)
agent.salt_api_async(func="win_agent.run_manual_checks")
return "ok"

View File

@@ -1,26 +1,38 @@
from tacticalrmm.test import BaseTestCase
from tacticalrmm.test import TacticalTestCase
from .serializers import CheckSerializer
from model_bakery import baker
from itertools import cycle
class TestCheckViews(TacticalTestCase):
def setUp(self):
self.authenticate()
class TestCheckViews(BaseTestCase):
def test_get_disk_check(self):
url = f"/checks/{self.agentDiskCheck.pk}/check/"
# setup data
disk_check = baker.make_recipe("checks.diskspace_check")
url = f"/checks/{disk_check.pk}/check/"
resp = self.client.get(url, format="json")
serializer = CheckSerializer(self.agentDiskCheck)
serializer = CheckSerializer(disk_check)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, serializer.data)
self.check_not_authenticated("post", url)
def test_add_disk_check(self):
# setup data
agent = baker.make_recipe("agents.agent")
url = "/checks/checks/"
valid_payload = {
"pk": self.agent.pk,
"pk": agent.pk,
"check": {
"check_type": "diskspace",
"disk": "D:",
"disk": "C:",
"threshold": 55,
"fails_b4_alert": 3,
},
@@ -31,7 +43,7 @@ class TestCheckViews(BaseTestCase):
# this should fail because we already have a check for drive C: in setup
invalid_payload = {
"pk": self.agent.pk,
"pk": agent.pk,
"check": {
"check_type": "diskspace",
"disk": "C:",
@@ -44,23 +56,30 @@ class TestCheckViews(BaseTestCase):
self.assertEqual(resp.status_code, 400)
def test_get_policy_disk_check(self):
url = f"/checks/{self.policyDiskCheck.pk}/check/"
# setup data
policy = baker.make("automation.Policy")
disk_check = baker.make_recipe("checks.diskspace_check", policy=policy)
url = f"/checks/{disk_check.pk}/check/"
resp = self.client.get(url, format="json")
serializer = CheckSerializer(self.policyDiskCheck)
serializer = CheckSerializer(disk_check)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, serializer.data)
self.check_not_authenticated("post", url)
def test_add_policy_disk_check(self):
# setup data
policy = baker.make("automation.Policy")
url = "/checks/checks/"
valid_payload = {
"policy": self.policy.pk,
"policy": policy.pk,
"check": {
"check_type": "diskspace",
"disk": "D:",
"disk": "M:",
"threshold": 86,
"fails_b4_alert": 2,
},
@@ -71,7 +90,7 @@ class TestCheckViews(BaseTestCase):
# this should fail because we already have a check for drive M: in setup
invalid_payload = {
"policy": self.policy.pk,
"policy": policy.pk,
"check": {
"check_type": "diskspace",
"disk": "M:",
@@ -90,8 +109,14 @@ class TestCheckViews(BaseTestCase):
self.assertEqual(26, len(r.data))
def test_edit_check_alert(self):
url_a = f"/checks/{self.agentDiskCheck.pk}/check/"
url_p = f"/checks/{self.policyDiskCheck.pk}/check/"
# setup data
policy = baker.make("automation.Policy")
agent = baker.make_recipe("agents.agent")
policy_disk_check = baker.make_recipe("checks.diskspace_check", policy=policy)
agent_disk_check = baker.make_recipe("checks.diskspace_check", agent=agent)
url_a = f"/checks/{agent_disk_check.pk}/check/"
url_p = f"/checks/{policy_disk_check.pk}/check/"
valid_payload = {"email_alert": False, "check_alert": True}
invalid_payload = {"email_alert": False}

View File

@@ -2,7 +2,7 @@ from django.urls import path
from . import views
urlpatterns = [
path("checks/", views.GetAddCheck.as_view()),
path("checks/", views.AddCheck.as_view()),
path("<int:pk>/check/", views.GetUpdateDeleteCheck.as_view()),
path("<pk>/loadchecks/", views.load_checks),
path("getalldisks/", views.get_disks_for_policies),

View File

@@ -1,3 +1,5 @@
import asyncio
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
@@ -13,7 +15,6 @@ from scripts.models import Script
from .serializers import CheckSerializer
from .tasks import run_checks_task
from automation.tasks import (
generate_agent_checks_from_policies_task,
@@ -22,11 +23,7 @@ from automation.tasks import (
)
class GetAddCheck(APIView):
def get(self, request):
checks = Check.objects.all()
return Response(CheckSerializer(checks, many=True).data)
class AddCheck(APIView):
def post(self, request):
policy = None
agent = None
@@ -182,7 +179,10 @@ class GetUpdateDeleteCheck(APIView):
@api_view()
def run_checks(request, pk):
agent = get_object_or_404(Agent, pk=pk)
run_checks_task.delay(agent.pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
return Response(agent.hostname)

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.1.2 on 2020-11-02 19:20
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('clients', '0006_deployment'),
]
operations = [
migrations.RenameField(
model_name='client',
old_name='client',
new_name='name',
),
migrations.RenameField(
model_name='site',
old_name='site',
new_name='name',
),
]

View File

@@ -0,0 +1,21 @@
# Generated by Django 3.1.2 on 2020-11-03 14:30
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('clients', '0007_auto_20201102_1920'),
]
operations = [
migrations.AlterModelOptions(
name='client',
options={'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',)},
),
]

View File

@@ -7,7 +7,7 @@ from logs.models import BaseAuditModel
class Client(BaseAuditModel):
client = models.CharField(max_length=255, unique=True)
name = models.CharField(max_length=255, unique=True)
workstation_policy = models.ForeignKey(
"automation.Policy",
related_name="workstation_clients",
@@ -24,13 +24,16 @@ class Client(BaseAuditModel):
on_delete=models.SET_NULL,
)
class Meta:
ordering = ("name",)
def __str__(self):
return self.client
return self.name
@property
def has_maintenanace_mode_agents(self):
return (
Agent.objects.filter(client=self.client, maintenance_mode=True).count() > 0
Agent.objects.filter(site__client=self, maintenance_mode=True).count() > 0
)
@property
@@ -44,7 +47,7 @@ class Client(BaseAuditModel):
"last_seen",
"overdue_time",
)
.filter(client=self.client)
.filter(site__client=self)
.prefetch_related("agentchecks")
)
for agent in agents:
@@ -52,8 +55,7 @@ class Client(BaseAuditModel):
return True
if agent.overdue_email_alert or agent.overdue_text_alert:
if agent.status == "overdue":
return True
return agent.status == "overdue"
return False
@@ -67,7 +69,7 @@ class Client(BaseAuditModel):
class Site(BaseAuditModel):
client = models.ForeignKey(Client, related_name="sites", on_delete=models.CASCADE)
site = models.CharField(max_length=255)
name = models.CharField(max_length=255)
workstation_policy = models.ForeignKey(
"automation.Policy",
related_name="workstation_sites",
@@ -84,17 +86,15 @@ class Site(BaseAuditModel):
on_delete=models.SET_NULL,
)
class Meta:
ordering = ("name",)
def __str__(self):
return self.site
return self.name
@property
def has_maintenanace_mode_agents(self):
return (
Agent.objects.filter(
client=self.client.client, site=self.site, maintenance_mode=True
).count()
> 0
)
return Agent.objects.filter(site=self, maintenance_mode=True).count() > 0
@property
def has_failing_checks(self):
@@ -107,7 +107,7 @@ class Site(BaseAuditModel):
"last_seen",
"overdue_time",
)
.filter(client=self.client.client, site=self.site)
.filter(site=self)
.prefetch_related("agentchecks")
)
for agent in agents:
@@ -115,8 +115,7 @@ class Site(BaseAuditModel):
return True
if agent.overdue_email_alert or agent.overdue_text_alert:
if agent.status == "overdue":
return True
return agent.status == "overdue"
return False
@@ -128,13 +127,6 @@ class Site(BaseAuditModel):
return SiteSerializer(site).data
def validate_name(name):
if "|" in name:
return False
else:
return True
MON_TYPE_CHOICES = [
("server", "Server"),
("workstation", "Workstation"),

View File

@@ -3,19 +3,25 @@ from .models import Client, Site, Deployment
class SiteSerializer(ModelSerializer):
client_name = ReadOnlyField(source="client.name")
class Meta:
model = Site
fields = "__all__"
def validate(self, val):
if "|" in val["site"]:
if "|" in val["name"]:
raise ValidationError("Site name cannot contain the | character")
if self.context:
client = Client.objects.get(pk=self.context["clientpk"])
if Site.objects.filter(client=client, name=val["name"]).exists():
raise ValidationError(f"Site {val['name']} already exists")
return val
class ClientSerializer(ModelSerializer):
sites = SiteSerializer(many=True, read_only=True)
class Meta:
@@ -30,29 +36,38 @@ class ClientSerializer(ModelSerializer):
if len(self.context["site"]) > 255:
raise ValidationError("Site name too long")
if "|" in val["client"]:
if "|" in val["name"]:
raise ValidationError("Client name cannot contain the | character")
return val
class TreeSerializer(ModelSerializer):
client_name = ReadOnlyField(source="client.client")
class SiteTreeSerializer(ModelSerializer):
maintenance_mode = ReadOnlyField(source="has_maintenanace_mode_agents")
failing_checks = ReadOnlyField(source="has_failing_checks")
class Meta:
model = Site
fields = (
"id",
"site",
"client_name",
)
fields = "__all__"
ordering = ("failing_checks",)
class ClientTreeSerializer(ModelSerializer):
sites = SiteTreeSerializer(many=True, read_only=True)
maintenance_mode = ReadOnlyField(source="has_maintenanace_mode_agents")
failing_checks = ReadOnlyField(source="has_failing_checks")
class Meta:
model = Client
fields = "__all__"
ordering = ("failing_checks",)
class DeploymentSerializer(ModelSerializer):
client_id = ReadOnlyField(source="client.id")
site_id = ReadOnlyField(source="site.id")
client_name = ReadOnlyField(source="client.client")
site_name = ReadOnlyField(source="site.site")
client_name = ReadOnlyField(source="client.name")
site_name = ReadOnlyField(source="site.name")
class Meta:
model = Deployment

View File

@@ -1,16 +1,32 @@
import uuid
from unittest import mock
from tacticalrmm.test import BaseTestCase
from tacticalrmm.test import TacticalTestCase
from model_bakery import baker
from .models import Client, Site, Deployment
from rest_framework.serializers import ValidationError
from .serializers import (
ClientSerializer,
SiteSerializer,
ClientTreeSerializer,
DeploymentSerializer,
)
class TestClientViews(BaseTestCase):
class TestClientViews(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
def test_get_clients(self):
# setup data
baker.make("clients.Client", _quantity=5)
clients = Client.objects.all()
url = "/clients/clients/"
r = self.client.get(url)
r = self.client.get(url, format="json")
serializer = ClientSerializer(clients, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
@@ -21,15 +37,42 @@ class TestClientViews(BaseTestCase):
self.assertEqual(r.status_code, 200)
payload["client"] = "Company1|askd"
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
with self.assertRaisesMessage(
ValidationError, "Client name cannot contain the | character"
):
self.assertFalse(serializer.is_valid(raise_exception=True))
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
payload = {"client": "Company 1", "site": "Site2|a34"}
payload = {"client": "Company 156", "site": "Site2|a34"}
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
with self.assertRaisesMessage(
ValidationError, "Site name cannot contain the | character"
):
self.assertFalse(serializer.is_valid(raise_exception=True))
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
# test unique
payload = {"client": "Company 1", "site": "Site 1"}
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
with self.assertRaisesMessage(
ValidationError, "client with this name already exists."
):
self.assertFalse(serializer.is_valid(raise_exception=True))
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
# test long site name
payload = {"client": "Company 2394", "site": "Site123" * 100}
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
with self.assertRaisesMessage(ValidationError, "Site name too long"):
self.assertFalse(serializer.is_valid(raise_exception=True))
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
@@ -41,88 +84,177 @@ class TestClientViews(BaseTestCase):
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
def test_get_sites(self):
url = "/clients/sites/"
r = self.client.get(url)
self.check_not_authenticated("post", url)
def test_edit_client(self):
# setup data
client = baker.make("clients.Client")
# test invalid id
r = self.client.put("/clients/500/client/", format="json")
self.assertEqual(r.status_code, 404)
data = {"id": client.id, "name": "New Name"}
url = f"/clients/{client.id}/client/"
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(Client.objects.filter(name="New Name").exists())
self.check_not_authenticated("put", url)
def test_delete_client(self):
# setup data
client = baker.make("clients.Client")
site = baker.make("clients.Site", client=client)
agent = baker.make_recipe("agents.agent", site=site)
# test invalid id
r = self.client.delete("/clients/500/client/", format="json")
self.assertEqual(r.status_code, 404)
url = f"/clients/{client.id}/client/"
# test deleting with agents under client
r = self.client.delete(url, format="json")
self.assertEqual(r.status_code, 400)
# test successful deletion
agent.delete()
r = self.client.delete(url, format="json")
self.assertEqual(r.status_code, 200)
self.assertFalse(Client.objects.filter(pk=client.id).exists())
self.assertFalse(Site.objects.filter(pk=site.id).exists())
self.check_not_authenticated("put", url)
def test_get_sites(self):
# setup data
baker.make("clients.Site", _quantity=5)
sites = Site.objects.all()
url = "/clients/sites/"
r = self.client.get(url, format="json")
serializer = SiteSerializer(sites, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
def test_add_site(self):
url = "/clients/addsite/"
# setup data
site = baker.make("clients.Site")
payload = {"client": "Google", "site": "LA Office"}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
url = "/clients/sites/"
payload = {"client": "Google", "site": "LA Off|ice |*&@#$"}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
payload = {"client": "Google", "site": "KN Office"}
# test success add
payload = {"client": site.client.id, "name": "LA Office"}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(
Site.objects.filter(
name="LA Office", client__name=site.client.name
).exists()
)
# test with | symbol
payload = {"client": site.client.id, "name": "LA Off|ice |*&@#$"}
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
with self.assertRaisesMessage(
ValidationError, "Site name cannot contain the | character"
):
self.assertFalse(serializer.is_valid(raise_exception=True))
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 400)
# test site already exists
payload = {"client": site.client.id, "name": "LA Office"}
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
with self.assertRaisesMessage(ValidationError, "Site LA Office already exists"):
self.assertFalse(serializer.is_valid(raise_exception=True))
self.check_not_authenticated("post", url)
def test_list_clients(self):
url = "/clients/listclients/"
def test_edit_site(self):
# setup data
site = baker.make("clients.Site")
r = self.client.get(url)
# test invalid id
r = self.client.put("/clients/500/site/", format="json")
self.assertEqual(r.status_code, 404)
data = {"id": site.id, "name": "New Name", "client": site.client.id}
url = f"/clients/{site.id}/site/"
r = self.client.put(url, data, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(Site.objects.filter(name="New Name").exists())
self.check_not_authenticated("get", url)
self.check_not_authenticated("put", url)
def test_load_tree(self):
def test_delete_site(self):
# setup data
site = baker.make("clients.Site")
agent = baker.make_recipe("agents.agent", site=site)
with mock.patch(
"clients.models.Client.has_failing_checks",
new_callable=mock.PropertyMock,
return_value=True,
):
# test invalid id
r = self.client.delete("/clients/500/site/", format="json")
self.assertEqual(r.status_code, 404)
url = "/clients/loadtree/"
url = f"/clients/{site.id}/site/"
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
# test deleting with last site under client
r = self.client.delete(url, format="json")
self.assertEqual(r.status_code, 400)
client = Client.objects.get(client="Facebook")
self.assertTrue(f"Facebook|{client.pk}|negative" in r.data.keys())
# test deletion when agents exist under site
baker.make("clients.Site", client=site.client)
r = self.client.delete(url, format="json")
self.assertEqual(r.status_code, 400)
with mock.patch(
"clients.models.Site.has_failing_checks",
new_callable=mock.PropertyMock,
return_value=False,
):
client = Client.objects.get(client="Google")
site = Site.objects.get(client=client, site="LA Office")
self.assertTrue(
f"LA Office|{site.pk}|black" in [i for i in r.data.values()][0]
)
self.check_not_authenticated("get", url)
def test_load_clients(self):
url = "/clients/loadclients/"
r = self.client.get(url)
# test successful deletion
agent.delete()
r = self.client.delete(url, format="json")
self.assertEqual(r.status_code, 200)
self.assertFalse(Site.objects.filter(pk=site.id).exists())
self.check_not_authenticated("delete", url)
def test_get_tree(self):
# setup data
baker.make("clients.Site", _quantity=10)
clients = Client.objects.all()
url = "/clients/tree/"
r = self.client.get(url, format="json")
serializer = ClientTreeSerializer(clients, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
def test_get_deployments(self):
# setup data
deployments = baker.make("clients.Deployment", _quantity=5)
url = "/clients/deployments/"
r = self.client.get(url)
serializer = DeploymentSerializer(deployments, many=True)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, serializer.data)
self.check_not_authenticated("get", url)
def test_add_deployment(self):
# setup data
site = baker.make("clients.Site")
url = "/clients/deployments/"
payload = {
"client": "Google",
"site": "Main Office",
"client": site.client.id,
"site": site.id,
"expires": "2037-11-23 18:53",
"power": 1,
"ping": 0,
@@ -134,36 +266,26 @@ class TestClientViews(BaseTestCase):
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
payload["site"] = "ASDkjh23k4jh"
payload["site"] = "500"
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 404)
payload["client"] = "324234ASDqwe"
payload["client"] = "500"
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 404)
self.check_not_authenticated("post", url)
def test_delete_deployment(self):
# setup data
deployment = baker.make("clients.Deployment")
url = "/clients/deployments/"
payload = {
"client": "Google",
"site": "Main Office",
"expires": "2037-11-23 18:53",
"power": 1,
"ping": 0,
"rdp": 1,
"agenttype": "server",
"arch": "64",
}
r = self.client.post(url, payload, format="json")
self.assertEqual(r.status_code, 200)
dep = Deployment.objects.last()
url = f"/clients/{dep.pk}/deployment/"
url = f"/clients/{deployment.id}/deployment/"
r = self.client.delete(url)
self.assertEqual(r.status_code, 200)
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists())
url = "/clients/32348/deployment/"
r = self.client.delete(url)

View File

@@ -4,14 +4,9 @@ from . import views
urlpatterns = [
path("clients/", views.GetAddClients.as_view()),
path("<int:pk>/client/", views.GetUpdateDeleteClient.as_view()),
path("tree/", views.GetClientTree.as_view()),
path("sites/", views.GetAddSites.as_view()),
path("listclients/", views.list_clients),
path("listsites/", views.list_sites),
path("addsite/", views.add_site),
path("editsite/", views.edit_site),
path("deletesite/", views.delete_site),
path("loadtree/", views.load_tree),
path("loadclients/", views.load_clients),
path("<int:pk>/site/", views.GetUpdateDeleteSite.as_view()),
path("deployments/", views.AgentDeployment.as_view()),
path("<int:pk>/deployment/", views.AgentDeployment.as_view()),
path("<str:uid>/deploy/", views.GenerateAgent.as_view()),

View File

@@ -22,10 +22,10 @@ from rest_framework.decorators import api_view
from .serializers import (
ClientSerializer,
SiteSerializer,
TreeSerializer,
ClientTreeSerializer,
DeploymentSerializer,
)
from .models import Client, Site, Deployment, validate_name
from .models import Client, Site, Deployment
from agents.models import Agent
from core.models import CoreSettings
from tacticalrmm.utils import notify_error
@@ -39,51 +39,50 @@ class GetAddClients(APIView):
def post(self, request):
if "initialsetup" in request.data:
client = {"client": request.data["client"]["client"].strip()}
site = {"site": request.data["client"]["site"].strip()}
client = {"name": request.data["client"]["client"].strip()}
site = {"name": request.data["client"]["site"].strip()}
serializer = ClientSerializer(data=client, context=request.data["client"])
serializer.is_valid(raise_exception=True)
core = CoreSettings.objects.first()
core.default_time_zone = request.data["timezone"]
core.save(update_fields=["default_time_zone"])
else:
client = {"client": request.data["client"].strip()}
site = {"site": request.data["site"].strip()}
client = {"name": request.data["client"].strip()}
site = {"name": request.data["site"].strip()}
serializer = ClientSerializer(data=client, context=request.data)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
Site(client=obj, site=site["site"]).save()
Site(client=obj, name=site["name"]).save()
return Response(f"{obj} was added!")
class GetUpdateDeleteClient(APIView):
def patch(self, request, pk):
def put(self, request, pk):
client = get_object_or_404(Client, pk=pk)
orig = client.client
serializer = ClientSerializer(data=request.data, instance=client)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
serializer.save()
agents = Agent.objects.filter(client=orig)
for agent in agents:
agent.client = obj.client
agent.save(update_fields=["client"])
return Response(f"{orig} renamed to {obj}")
return Response("The Client was renamed")
def delete(self, request, pk):
client = get_object_or_404(Client, pk=pk)
agents = Agent.objects.filter(client=client.client)
if agents.exists():
agent_count = Agent.objects.filter(site__client=client).count()
if agent_count > 0:
return notify_error(
f"Cannot delete {client} while {agents.count()} agents exist in it. Move the agents to another client first."
f"Cannot delete {client} while {agent_count} agents exist in it. Move the agents to another client first."
)
client.delete()
return Response(f"{client.client} was deleted!")
return Response(f"{client.name} was deleted!")
class GetClientTree(APIView):
def get(self, request):
clients = Client.objects.all()
return Response(ClientTreeSerializer(clients, many=True).data)
class GetAddSites(APIView):
@@ -91,126 +90,42 @@ class GetAddSites(APIView):
sites = Site.objects.all()
return Response(SiteSerializer(sites, many=True).data)
def post(self, request):
name = request.data["name"].strip()
serializer = SiteSerializer(
data={"name": name, "client": request.data["client"]},
context={"clientpk": request.data["client"]},
)
serializer.is_valid(raise_exception=True)
serializer.save()
@api_view(["POST"])
def add_site(request):
client = Client.objects.get(client=request.data["client"].strip())
site = request.data["site"].strip()
if not validate_name(site):
content = {"error": "Site name cannot contain the | character"}
return Response(content, status=status.HTTP_400_BAD_REQUEST)
if Site.objects.filter(client=client).filter(site=site):
content = {"error": f"Site {site} already exists"}
return Response(content, status=status.HTTP_400_BAD_REQUEST)
try:
Site(client=client, site=site).save()
except DataError:
content = {"error": "Site name too long (max 255 chars)"}
return Response(content, status=status.HTTP_400_BAD_REQUEST)
else:
return Response("ok")
@api_view(["PATCH"])
def edit_site(request):
new_name = request.data["name"].strip()
class GetUpdateDeleteSite(APIView):
def put(self, request, pk):
if not validate_name(new_name):
err = "Site name cannot contain the | character"
return Response(err, status=status.HTTP_400_BAD_REQUEST)
site = get_object_or_404(Site, pk=pk)
serializer = SiteSerializer(instance=site, data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
client = get_object_or_404(Client, client=request.data["client"])
site = Site.objects.filter(client=client).filter(site=request.data["site"]).get()
return Response("ok")
agents = Agent.objects.filter(client=client.client).filter(site=site.site)
def delete(self, request, pk):
site = get_object_or_404(Site, pk=pk)
if site.client.sites.count() == 1:
return notify_error(f"A client must have at least 1 site.")
site.site = new_name
site.save(update_fields=["site"])
agent_count = Agent.objects.filter(site=site).count()
for agent in agents:
agent.site = new_name
agent.save(update_fields=["site"])
if agent_count > 0:
return notify_error(
f"Cannot delete {site.name} while {agent_count} agents exist in it. Move the agents to another site first."
)
return Response("ok")
@api_view(["DELETE"])
def delete_site(request):
client = get_object_or_404(Client, client=request.data["client"])
if client.sites.count() == 1:
return notify_error(f"A client must have at least 1 site.")
site = Site.objects.filter(client=client).filter(site=request.data["site"]).get()
agents = Agent.objects.filter(client=client.client).filter(site=site.site)
if agents.exists():
return notify_error(
f"Cannot delete {site} while {agents.count()} agents exist in it. Move the agents to another site first."
)
site.delete()
return Response(f"{site} was deleted!")
@api_view()
# for vue
def list_clients(request):
clients = Client.objects.all()
return Response(ClientSerializer(clients, many=True).data)
@api_view()
# for vue
def list_sites(request):
sites = Site.objects.all()
return Response(TreeSerializer(sites, many=True).data)
@api_view()
def load_tree(request):
clients = Client.objects.all()
new = {}
for x in clients:
b = []
sites = Site.objects.filter(client=x)
for i in sites:
if i.has_maintenanace_mode_agents:
b.append(f"{i.site}|{i.pk}|warning")
elif i.has_failing_checks:
b.append(f"{i.site}|{i.pk}|negative")
else:
b.append(f"{i.site}|{i.pk}|black")
if x.has_maintenanace_mode_agents:
new[f"{x.client}|{x.pk}|warning"] = b
elif x.has_failing_checks:
new[f"{x.client}|{x.pk}|negative"] = b
else:
new[f"{x.client}|{x.pk}|black"] = b
return Response(new)
@api_view()
def load_clients(request):
clients = Client.objects.all()
new = {}
for x in clients:
b = []
sites = Site.objects.filter(client=x)
for i in sites:
b.append(i.site)
new[x.client] = b
return Response(new)
site.delete()
return Response(f"{site.name} was deleted!")
class AgentDeployment(APIView):
@@ -221,8 +136,8 @@ class AgentDeployment(APIView):
def post(self, request):
from knox.models import AuthToken
client = get_object_or_404(Client, client=request.data["client"])
site = get_object_or_404(Site, client=client, site=request.data["site"])
client = get_object_or_404(Client, pk=request.data["client"])
site = get_object_or_404(Site, pk=request.data["site"])
expires = dt.datetime.strptime(
request.data["expires"], "%Y-%m-%d %H:%M"
@@ -285,8 +200,8 @@ class GenerateAgent(APIView):
)
download_url = settings.DL_64 if d.arch == "64" else settings.DL_32
client = d.client.client.replace(" ", "").lower()
site = d.site.site.replace(" ", "").lower()
client = d.client.name.replace(" ", "").lower()
site = d.site.name.replace(" ", "").lower()
client = re.sub(r"([^a-zA-Z0-9]+)", "", client)
site = re.sub(r"([^a-zA-Z0-9]+)", "", site)

View File

@@ -133,7 +133,7 @@ func main() {
os.Exit(1)
}
time.Sleep(20 * time.Second)
time.Sleep(10 * time.Second)
fmt.Println("Installation starting.")
cmd := exec.Command(tacrmm, cmdArgs...)

View File

@@ -36,7 +36,7 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
write-host ('Extracting...')
Start-Sleep -s 20
Start-Sleep -s 10
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
exit 0
}

View File

@@ -3,7 +3,6 @@ from django.conf import settings
from core.models import CoreSettings
from .helpers import get_auth_token
import asyncio
import ssl
import websockets
import json
@@ -11,15 +10,15 @@ import json
class Command(BaseCommand):
help = "Sets up initial mesh central configuration"
async def websocket_call(self):
async def websocket_call(self, mesh_settings):
token = get_auth_token(
self.mesh_settings.mesh_username, self.mesh_settings.mesh_token
mesh_settings.mesh_username, mesh_settings.mesh_token
)
if settings.MESH_WS_URL:
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
else:
site = self.mesh_settings.mesh_site.replace("https", "wss")
site = mesh_settings.mesh_site.replace("https", "wss")
uri = f"{site}/control.ashx?auth={token}"
async with websockets.connect(uri) as websocket:
@@ -45,5 +44,5 @@ class Command(BaseCommand):
break
def handle(self, *args, **kwargs):
self.mesh_settings = CoreSettings.objects.first()
asyncio.get_event_loop().run_until_complete(self.websocket_call())
mesh_settings = CoreSettings.objects.first()
asyncio.get_event_loop().run_until_complete(self.websocket_call(mesh_settings))

View File

@@ -1,53 +1,83 @@
from django.core.management.base import BaseCommand
from django.conf import settings
from core.models import CoreSettings
from .helpers import get_auth_token
import asyncio
import websockets
import json
class Command(BaseCommand):
help = "Sets up initial mesh central configuration"
async def websocket_call(self):
token = get_auth_token(
self.mesh_settings.mesh_username, self.mesh_settings.mesh_token
)
if settings.MESH_WS_URL:
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
else:
site = self.mesh_settings.mesh_site.replace("https", "wss")
uri = f"{site}/control.ashx?auth={token}"
async with websockets.connect(uri) as websocket:
# Get Device groups to see if it exists
await websocket.send(json.dumps({"action": "meshes"}))
async for message in websocket:
response = json.loads(message)
if response["action"] == "meshes":
# If no meshes are present
if not response["meshes"]:
await websocket.send(
json.dumps(
{
"action": "createmesh",
"meshname": "TacticalRMM",
"meshtype": 2,
"responseid": "python",
}
)
)
break
else:
break
def handle(self, *args, **kwargs):
self.mesh_settings = CoreSettings.objects.first()
asyncio.get_event_loop().run_until_complete(self.websocket_call())
self.stdout.write("Initial Mesh Central setup complete")
from django.core.management.base import BaseCommand
from django.conf import settings
from core.models import CoreSettings
from .helpers import get_auth_token
import asyncio
import websockets
import json
class Command(BaseCommand):
help = "Sets up initial mesh central configuration"
async def websocket_call(self, mesh_settings):
token = get_auth_token(
mesh_settings.mesh_username, mesh_settings.mesh_token
)
if settings.MESH_WS_URL:
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
else:
site = mesh_settings.mesh_site.replace("https", "wss")
uri = f"{site}/control.ashx?auth={token}"
async with websockets.connect(uri) as websocket:
# Get Device groups to see if it exists
await websocket.send(json.dumps({"action": "meshes"}))
async for message in websocket:
response = json.loads(message)
if response["action"] == "meshes":
# If no meshes are present
if not response["meshes"]:
await websocket.send(
json.dumps(
{
"action": "createmesh",
"meshname": "TacticalRMM",
"meshtype": 2,
"responseid": "python",
}
)
)
break
else:
break
def handle(self, *args, **kwargs):
mesh_settings = CoreSettings.objects.first()
try:
# Check for Mesh Username
if not mesh_settings.mesh_username or settings.MESH_USERNAME != mesh_settings.mesh_username:
mesh_settings.mesh_username = settings.MESH_USERNAME
# Check for Mesh Site
if not mesh_settings.mesh_site or settings.MESH_SITE != mesh_settings.mesh_site:
mesh_settings.mesh_site = settings.MESH_SITE
# Check for Mesh Token
if (
not mesh_settings.mesh_token
or settings.MESH_TOKEN_KEY != mesh_settings.mesh_token
):
mesh_settings.mesh_token = settings.MESH_TOKEN_KEY
mesh_settings.save()
except AttributeError:
self.stdout.write(
"Mesh Setup was skipped because the configuration wasn't available. Needs to be setup manually."
)
return
try:
asyncio.get_event_loop().run_until_complete(self.websocket_call(mesh_settings))
self.stdout.write("Initial Mesh Central setup complete")
except websockets.exceptions.ConnectionClosedError:
self.stdout.write(
"Unable to connect to MeshCentral. Please verify it is online and the configuration is correct in the settings."
)
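
Editor's note: the rewritten command first copies the MESH_* values from Django settings into CoreSettings, skipping the whole setup when any of them is missing, then catches ConnectionClosedError so an unreachable MeshCentral only logs a warning. A tiny standalone illustration of the guarded-attribute pattern it leans on (FakeSettings is a stand-in for django.conf.settings):

class FakeSettings:
    MESH_USERNAME = "tactical"
    MESH_SITE = "https://mesh.example.com"
    # MESH_TOKEN_KEY intentionally left undefined

settings = FakeSettings()

try:
    username = settings.MESH_USERNAME
    site = settings.MESH_SITE
    token = settings.MESH_TOKEN_KEY  # raises AttributeError
except AttributeError:
    print("Mesh setup skipped; configuration not available")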

View File

@@ -0,0 +1,9 @@
from django.core.management.base import BaseCommand
from tacticalrmm.utils import reload_nats
class Command(BaseCommand):
help = "Reload Nats"
def handle(self, *args, **kwargs):
reload_nats()

View File

@@ -72,16 +72,14 @@ class CoreSettings(BaseAuditModel):
if not self.pk and CoreSettings.objects.exists():
raise ValidationError("There can only be one CoreSettings instance")
# Only runs on first create
# for install script
if not self.pk:
mesh_settings = self.get_initial_mesh_settings()
if "mesh_token" in mesh_settings:
self.mesh_token = mesh_settings["mesh_token"]
if "mesh_username" in mesh_settings:
self.mesh_username = mesh_settings["mesh_username"]
if "mesh_site" in mesh_settings:
self.mesh_site = mesh_settings["mesh_site"]
try:
self.mesh_site = settings.MESH_SITE
self.mesh_username = settings.MESH_USERNAME
self.mesh_token = settings.MESH_TOKEN_KEY
except:
pass
return super(CoreSettings, self).save(*args, **kwargs)
@@ -121,8 +119,8 @@ class CoreSettings(BaseAuditModel):
and self.smtp_port
):
return True
else:
return False
return False
def send_mail(self, subject, body, test=False):
@@ -168,60 +166,9 @@ class CoreSettings(BaseAuditModel):
except Exception as e:
logger.error(f"SMS failed to send: {e}")
def get_initial_mesh_settings(self):
mesh_settings = {}
# Check for Mesh Username
try:
if settings.MESH_USERNAME:
mesh_settings["mesh_username"] = settings.MESH_USERNAME
else:
raise AttributeError("MESH_USERNAME doesn't exist")
except AttributeError:
pass
# Check for Mesh Site
try:
if settings.MESH_SITE:
mesh_settings["mesh_site"] = settings.MESH_SITE
else:
raise AttributeError("MESH_SITE doesn't exist")
except AttributeError:
pass
# Check for Mesh Token
try:
if settings.MESH_TOKEN_KEY:
mesh_settings["mesh_token"] = settings.MESH_TOKEN_KEY
else:
raise AttributeError("MESH_TOKEN_KEY doesn't exist")
except AttributeError:
filepath = "/token/token.key"
counter = 0
while counter < 12:
try:
with open(filepath, "r") as read_file:
key = read_file.readlines()
# Remove key file contents for security reasons
with open(filepath, "w") as write_file:
write_file.write("")
# readlines() returns an array. Get first item
mesh_settings["mesh_token"] = key[0].rstrip()
break
except (IOError, IndexError):
pass
counter = counter + 1
time.sleep(10)
return mesh_settings
@staticmethod
def serialize(core):
# serializes the core and returns json
from .serializers import CoreSerializer
return CoreSerializer(core).data
return CoreSerializer(core).data

View File

@@ -4,8 +4,6 @@ from loguru import logger
from django.conf import settings
from django.utils import timezone as djangotime
from tacticalrmm.celery import app
from accounts.models import User
from agents.models import Agent
from autotasks.models import AutomatedTask
from autotasks.tasks import delete_win_task_schedule
@@ -14,15 +12,6 @@ logger.configure(**settings.LOG_CONFIG)
@app.task
def core_maintenance_tasks():
# cleanup any leftover agent user accounts
agents = Agent.objects.values_list("agent_id", flat=True)
users = User.objects.exclude(username__in=agents).filter(last_login=None)
if users:
users.delete()
logger.info(
"Removed leftover agent user accounts:", str([i.username for i in users])
)
# cleanup expired runonce tasks
tasks = AutomatedTask.objects.filter(
task_type="runonce",

View File

@@ -1,8 +1,12 @@
from tacticalrmm.test import BaseTestCase
from tacticalrmm.test import TacticalTestCase
from core.tasks import core_maintenance_tasks
class TestCoreTasks(BaseTestCase):
class TestCoreTasks(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
self.authenticate()
def test_core_maintenance_tasks(self):
task = core_maintenance_tasks.s().apply()
self.assertEqual(task.state, "SUCCESS")

View File

@@ -68,7 +68,9 @@ def version(request):
@api_view()
def dashboard_info(request):
return Response({"trmm_version": settings.TRMM_VERSION})
return Response(
{"trmm_version": settings.TRMM_VERSION, "dark_mode": request.user.dark_mode}
)
@api_view()

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.2 on 2020-11-10 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0007_auditlog_debug_info'),
]
operations = [
migrations.AlterField(
model_name='auditlog',
name='action',
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.2 on 2020-11-10 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0008_auto_20201110_1431'),
]
operations = [
migrations.AlterField(
model_name='auditlog',
name='action',
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command')], max_length=100),
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 3.1.2 on 2020-11-10 22:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0009_auto_20201110_1431'),
]
operations = [
migrations.AlterField(
model_name='auditlog',
name='action',
field=models.CharField(choices=[('login', 'User Login'), ('failed_login', 'Failed User Login'), ('delete', 'Delete Object'), ('modify', 'Modify Object'), ('add', 'Add Object'), ('view', 'View Object'), ('check_run', 'Check Run'), ('task_run', 'Task Run'), ('agent_install', 'Agent Install'), ('remote_session', 'Remote Session'), ('execute_script', 'Execute Script'), ('execute_command', 'Execute Command'), ('bulk_action', 'Bulk Action')], max_length=100),
),
migrations.AlterField(
model_name='auditlog',
name='object_type',
field=models.CharField(choices=[('user', 'User'), ('script', 'Script'), ('agent', 'Agent'), ('policy', 'Policy'), ('winupdatepolicy', 'Patch Policy'), ('client', 'Client'), ('site', 'Site'), ('check', 'Check'), ('automatedtask', 'Automated Task'), ('coresettings', 'Core Settings'), ('bulk', 'Bulk')], max_length=100),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-19 08:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0010_auto_20201110_2238'),
]
operations = [
migrations.AlterField(
model_name='pendingaction',
name='action_type',
field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update')], max_length=255, null=True),
),
]

View File

@@ -1,4 +1,5 @@
import datetime as dt
import json
from abc import abstractmethod
from django.db import models
from tacticalrmm.middleware import get_username, get_debug_info
@@ -6,6 +7,7 @@ from tacticalrmm.middleware import get_username, get_debug_info
ACTION_TYPE_CHOICES = [
("schedreboot", "Scheduled Reboot"),
("taskaction", "Scheduled Task Action"),
("agentupdate", "Agent Update"),
]
AUDIT_ACTION_TYPE_CHOICES = [
@@ -15,9 +17,13 @@ AUDIT_ACTION_TYPE_CHOICES = [
("modify", "Modify Object"),
("add", "Add Object"),
("view", "View Object"),
("check_run", "Check Run"),
("task_run", "Task Run"),
("agent_install", "Agent Install"),
("remote_session", "Remote Session"),
("execute_script", "Execute Script"),
("execute_command", "Execute Command"),
("bulk_action", "Bulk Action"),
]
AUDIT_OBJECT_TYPE_CHOICES = [
@@ -31,6 +37,7 @@ AUDIT_OBJECT_TYPE_CHOICES = [
("check", "Check"),
("automatedtask", "Automated Task"),
("coresettings", "Core Settings"),
("bulk", "Bulk"),
]
# taskaction details format
@@ -170,6 +177,45 @@ class AuditLog(models.Model):
debug_info=debug_info,
)
@staticmethod
def audit_bulk_action(username, action, affected, debug_info={}):
from clients.models import Client, Site
from agents.models import Agent
from scripts.models import Script
target = ""
agents = None
if affected["target"] == "all":
target = "on all agents"
elif affected["target"] == "client":
client = Client.objects.get(pk=affected["client"])
target = f"on all agents within client: {client.name}"
elif affected["target"] == "site":
site = Site.objects.get(pk=affected["site"])
target = f"on all agents within site: {site.client.name}\\{site.name}"
elif affected["target"] == "agents":
agents = Agent.objects.filter(pk__in=affected["agentPKs"]).values_list(
"hostname", flat=True
)
target = "on multiple agents"
if action == "script":
script = Script.objects.get(pk=affected["scriptPK"])
action = f"script: {script.name}"
if agents:
affected["agent_hostnames"] = list(agents)
AuditLog.objects.create(
username=username,
object_type="bulk",
action="bulk_action",
message=f"{username} executed bulk {action} {target}",
debug_info=debug_info,
after_value=affected,
)
class DebugLog(models.Model):
pass
@@ -203,7 +249,7 @@ class PendingAction(models.Model):
obj = dt.datetime.strptime(self.details["time"], "%Y-%m-%d %H:%M:%S")
return dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
elif self.action_type == "taskaction":
elif self.action_type == "taskaction" or self.action_type == "agentupdate":
return "Next agent check-in"
@property
@@ -211,6 +257,9 @@ class PendingAction(models.Model):
if self.action_type == "schedreboot":
return "Device pending reboot"
elif self.action_type == "agentupdate":
return f"Agent update to {self.details['version']}"
elif self.action_type == "taskaction":
if self.details["action"] == "taskdelete":
return "Device pending task deletion"
@@ -246,28 +295,31 @@ class BaseAuditModel(models.Model):
before_value = {}
object_class = type(self)
object_name = object_class.__name__.lower()
username = get_username()
# populate created_by and modified_by fields on instance
if not getattr(self, "created_by", None):
self.created_by = get_username()
self.created_by = username
if hasattr(self, "modified_by"):
self.modified_by = get_username()
self.modified_by = username
# capture object properties before edit
if self.pk:
before_value = object_class.objects.get(pk=self.id)
# dont create entry for agent add since that is done in view
if not self.pk:
AuditLog.audit_object_add(
get_username(),
object_class.__name__.lower(),
username,
object_name,
object_class.serialize(self),
self.__str__(),
debug_info=get_debug_info(),
)
else:
AuditLog.audit_object_changed(
get_username(),
username,
object_class.__name__.lower(),
object_class.serialize(before_value),
object_class.serialize(self),
@@ -280,6 +332,7 @@ class BaseAuditModel(models.Model):
def delete(self, *args, **kwargs):
if get_username():
object_class = type(self)
AuditLog.audit_object_delete(
get_username(),

View File

@@ -22,8 +22,8 @@ class PendingActionSerializer(serializers.ModelSerializer):
hostname = serializers.ReadOnlyField(source="agent.hostname")
salt_id = serializers.ReadOnlyField(source="agent.salt_id")
client = serializers.ReadOnlyField(source="agent.client")
site = serializers.ReadOnlyField(source="agent.site")
client = serializers.ReadOnlyField(source="agent.client.name")
site = serializers.ReadOnlyField(source="agent.site.name")
due = serializers.ReadOnlyField()
description = serializers.ReadOnlyField()

View File

@@ -11,6 +11,10 @@ class TestAuditViews(TacticalTestCase):
self.setup_coresettings()
def create_audit_records(self):
# create clients for client filter
site = baker.make("clients.Site")
baker.make_recipe("agents.agent", site=site, hostname="AgentHostname1")
# user jim agent logs
baker.make_recipe(
"logs.agent_logs",
@@ -75,11 +79,13 @@ class TestAuditViews(TacticalTestCase):
_quantity=13,
)
return site
def test_get_audit_logs(self):
url = "/logs/auditlogs/"
# create data
self.create_audit_records()
site = self.create_audit_records()
# test data and result counts
data = [
@@ -111,6 +117,9 @@ class TestAuditViews(TacticalTestCase):
"count": 40,
},
{"filter": {"timeFilter": 35, "userFilter": ["james", "jim"]}, "count": 81},
{"filter": {"objectFilter": ["user"]}, "count": 26},
{"filter": {"actionFilter": ["login"]}, "count": 12},
{"filter": {"clientFilter": [site.client.id]}, "count": 23},
]
for req in data:

View File

@@ -4,6 +4,7 @@ from django.conf import settings
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.utils import timezone as djangotime
from django.db.models import Q
from datetime import datetime as dt
from rest_framework.response import Response
@@ -22,32 +23,52 @@ from .tasks import cancel_pending_action_task
class GetAuditLogs(APIView):
def patch(self, request):
from clients.models import Client
from agents.models import Agent
auditLogs = None
if "agentFilter" in request.data and "userFilter" in request.data:
audit_logs = AuditLog.objects.filter(
agent__in=request.data["agentFilter"],
username__in=request.data["userFilter"],
agentFilter = Q()
clientFilter = Q()
actionFilter = Q()
objectFilter = Q()
userFilter = Q()
timeFilter = Q()
if "agentFilter" in request.data:
agentFilter = Q(agent__in=request.data["agentFilter"])
elif "clientFilter" in request.data:
clients = Client.objects.filter(
pk__in=request.data["clientFilter"]
).values_list("id")
agents = Agent.objects.filter(site__client_id__in=clients).values_list(
"hostname"
)
clientFilter = Q(agent__in=agents)
elif "userFilter" in request.data:
audit_logs = AuditLog.objects.filter(
username__in=request.data["userFilter"]
)
if "userFilter" in request.data:
userFilter = Q(username__in=request.data["userFilter"])
elif "agentFilter" in request.data:
audit_logs = AuditLog.objects.filter(agent__in=request.data["agentFilter"])
if "actionFilter" in request.data:
actionFilter = Q(action__in=request.data["actionFilter"])
else:
audit_logs = AuditLog.objects.all()
if "objectFilter" in request.data:
objectFilter = Q(object_type__in=request.data["objectFilter"])
if audit_logs and "timeFilter" in request.data:
audit_logs = audit_logs.filter(
if "timeFilter" in request.data:
timeFilter = Q(
entry_time__lte=djangotime.make_aware(dt.today()),
entry_time__gt=djangotime.make_aware(dt.today())
- djangotime.timedelta(days=request.data["timeFilter"]),
)
audit_logs = (
AuditLog.objects.filter(agentFilter | clientFilter)
.filter(userFilter)
.filter(actionFilter)
.filter(objectFilter)
.filter(timeFilter)
)
return Response(AuditLogSerializer(audit_logs, many=True).data)
@@ -58,9 +79,8 @@ class FilterOptionsAuditLog(APIView):
return Response(AgentHostnameSerializer(agents, many=True).data)
if request.data["type"] == "user":
agents = Agent.objects.values_list("agent_id", flat=True)
users = User.objects.exclude(username__in=agents).filter(
username__icontains=request.data["pattern"]
users = User.objects.filter(
username__icontains=request.data["pattern"], agent=None
)
return Response(UserSerializer(users, many=True).data)
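
Editor's note: GetAuditLogs now builds one Q object per optional filter and chains .filter() calls, relying on the fact that an empty Q() leaves the queryset untouched, so any combination of agent, client, user, action, object and time filters composes cleanly. A hedged sketch of just the composition step, assuming the AuditLog model from this repo inside a configured Django project:

from django.db.models import Q
from logs.models import AuditLog  # assumed import from this repo

def filter_audit_logs(data):
    user_q = Q(username__in=data["userFilter"]) if "userFilter" in data else Q()
    action_q = Q(action__in=data["actionFilter"]) if "actionFilter" in data else Q()
    object_q = Q(object_type__in=data["objectFilter"]) if "objectFilter" in data else Q()
    # an empty Q() is a no-op, so missing keys cost nothing
    return AuditLog.objects.filter(user_q).filter(action_q).filter(object_q)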

View File

@@ -1,16 +1,17 @@
amqp==2.6.1
asgiref==3.2.10
asgiref==3.3.0
asyncio-nats-client==0.11.4
billiard==3.6.3.0
celery==4.4.6
certifi==2020.6.20
certifi==2020.11.8
cffi==1.14.3
chardet==3.0.4
cryptography==3.2
cryptography==3.2.1
decorator==4.4.2
Django==3.1.2
Django==3.1.3
django-cors-headers==3.5.0
django-rest-knox==4.1.0
djangorestframework==3.12.1
djangorestframework==3.12.2
future==0.18.2
idna==2.10
kombu==4.6.11
@@ -19,19 +20,20 @@ msgpack==1.0.0
packaging==20.4
psycopg2-binary==2.8.6
pycparser==2.20
pycryptodome==3.9.8
pycryptodome==3.9.9
pyotp==2.4.1
pyparsing==2.4.7
pytz==2020.1
pytz==2020.4
qrcode==6.1
redis==3.5.3
requests==2.24.0
six==1.15.0
sqlparse==0.4.1
twilio==6.46.0
urllib3==1.25.10
tldextract==3.0.2
twilio==6.47.0
urllib3==1.25.11
uWSGI==2.0.19.1
validators==0.18.1
vine==1.3.0
websockets==8.1
zipp==3.3.1
zipp==3.4.0

View File

@@ -1,5 +1,6 @@
from django.db import models
from logs.models import BaseAuditModel
from django.conf import settings
SCRIPT_SHELLS = [
("powershell", "Powershell"),
@@ -38,9 +39,9 @@ class Script(BaseAuditModel):
@property
def file(self):
if self.script_type == "userdefined":
return f"/srv/salt/scripts/userdefined/{self.filename}"
return f"{settings.SCRIPTS_DIR}/userdefined/{self.filename}"
else:
return f"/srv/salt/scripts/{self.filename}"
return f"{settings.SCRIPTS_DIR}/{self.filename}"
@property
def code(self):
@@ -62,7 +63,13 @@ class Script(BaseAuditModel):
# load community uploaded scripts into the database
# skip ones that already exist, only updating name / desc in case it changes
# files will be copied by the update script or in docker to /srv/salt/scripts
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
# for install script
if not settings.DOCKER_BUILD:
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
# for docker
else:
scripts_dir = settings.SCRIPTS_DIR
with open(
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
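
Editor's note: script paths are no longer hard-coded to /srv/salt/scripts; they come from settings.SCRIPTS_DIR, and community scripts load from either the repo checkout or SCRIPTS_DIR depending on settings.DOCKER_BUILD. A small standalone sketch of that selection, with placeholder values standing in for the real settings:

import os
from pathlib import Path

DOCKER_BUILD = False               # placeholder for settings.DOCKER_BUILD
SCRIPTS_DIR = "/srv/salt/scripts"  # placeholder for settings.SCRIPTS_DIR
BASE_DIR = "/rmm/api/tacticalrmm"  # placeholder for settings.BASE_DIR

if not DOCKER_BUILD:
    scripts_dir = os.path.join(Path(BASE_DIR).parents[1], "scripts")
else:
    scripts_dir = SCRIPTS_DIR
print(scripts_dir)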

View File

@@ -1,5 +1,6 @@
import os
from django.conf import settings
from rest_framework.serializers import ModelSerializer, ValidationError, ReadOnlyField
from .models import Script
@@ -27,7 +28,7 @@ class ScriptSerializer(ModelSerializer):
# but only if adding, not if editing since will overwrite if edit
if not self.instance:
script_path = os.path.join(
"/srv/salt/scripts/userdefined", val["filename"]
f"{settings.SCRIPTS_DIR}/userdefined", val["filename"]
)
if os.path.exists(script_path):
raise ValidationError(

View File

@@ -1,38 +1,73 @@
import asyncio
from tacticalrmm.celery import app
from agents.models import Agent
from .models import Script
from scripts.models import Script
@app.task
def run_script_bg_task(data):
agent = Agent.objects.get(pk=data["agentpk"])
script = Script.objects.get(pk=data["scriptpk"])
def handle_bulk_command_task(agentpks, cmd, shell, timeout):
agents = Agent.objects.filter(pk__in=agentpks)
agent.salt_api_async(
func="win_agent.run_script",
kwargs={
"filepath": script.filepath,
"filename": script.filename,
"shell": script.shell,
"timeout": data["timeout"],
"args": data["args"],
},
)
agents_nats = [agent for agent in agents if agent.has_nats]
agents_salt = [agent for agent in agents if not agent.has_nats]
minions = [agent.salt_id for agent in agents_salt]
if minions:
Agent.salt_batch_async(
minions=minions,
func="cmd.run_bg",
kwargs={
"cmd": cmd,
"shell": shell,
"timeout": timeout,
},
)
if agents_nats:
nats_data = {
"func": "rawcmd",
"timeout": timeout,
"payload": {
"command": cmd,
"shell": shell,
},
}
for agent in agents_nats:
asyncio.run(agent.nats_cmd(nats_data, wait=False))
@app.task
def run_bulk_script_task(data):
# for powershell and batch scripts only, workaround for salt bg script bug
script = Script.objects.get(pk=data["scriptpk"])
def handle_bulk_script_task(scriptpk, agentpks, args, timeout):
script = Script.objects.get(pk=scriptpk)
agents = Agent.objects.filter(pk__in=agentpks)
Agent.salt_batch_async(
minions=data["minions"],
func="win_agent.run_script",
kwargs={
"filepath": script.filepath,
"filename": script.filename,
agents_nats = [agent for agent in agents if agent.has_nats]
agents_salt = [agent for agent in agents if not agent.has_nats]
minions = [agent.salt_id for agent in agents_salt]
if minions:
Agent.salt_batch_async(
minions=minions,
func="win_agent.run_script",
kwargs={
"filepath": script.filepath,
"filename": script.filename,
"shell": script.shell,
"timeout": timeout,
"args": args,
"bg": True if script.shell == "python" else False, # salt bg script bug
},
)
nats_data = {
"func": "runscript",
"timeout": timeout,
"script_args": args,
"payload": {
"code": script.code,
"shell": script.shell,
"timeout": data["timeout"],
"args": data["args"],
},
)
}
for agent in agents_nats:
asyncio.run(agent.nats_cmd(nats_data, wait=False))
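Since both functions are registered with @app.task, callers queue them through Celery rather than invoking them directly. A minimal sketch of such a dispatch, not part of this diff; the module path scripts.tasks and the argument values are assumptions:
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
agent_pks = [1, 2, 3]  # placeholder agent primary keys
handle_bulk_command_task.delay(agent_pks, "ipconfig /flushdns", "cmd", 30)
handle_bulk_script_task.delay(scriptpk=5, agentpks=agent_pks, args=[], timeout=120)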

View File

@@ -94,7 +94,11 @@ class TestScriptViews(TacticalTestCase):
def test_load_community_scripts(self):
valid_shells = ["powershell", "python", "cmd"]
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
if not settings.DOCKER_BUILD:
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
else:
scripts_dir = settings.SCRIPTS_DIR
with open(
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")

View File

@@ -32,8 +32,8 @@ class TestServiceViews(TacticalTestCase):
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_get_refreshed_services(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_get_refreshed_services(self, nats_cmd):
# test a call where agent doesn't exist
resp = self.client.get("/services/500/refreshedservices/", format="json")
self.assertEqual(resp.status_code, 404)
@@ -41,7 +41,7 @@ class TestServiceViews(TacticalTestCase):
agent = baker.make_recipe("agents.agent_with_services")
url = f"/services/{agent.pk}/refreshedservices/"
salt_return = [
nats_return = [
{
"pid": 880,
"name": "AeLookupSvc",
@@ -65,30 +65,23 @@ class TestServiceViews(TacticalTestCase):
]
# test failed attempt
salt_api_cmd.return_value = "timeout"
nats_cmd.return_value = "timeout"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(timeout=15, func="win_agent.get_services")
salt_api_cmd.reset_mock()
# test failed attempt
salt_api_cmd.return_value = "error"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(timeout=15, func="win_agent.get_services")
salt_api_cmd.reset_mock()
nats_cmd.assert_called_with(data={"func": "winservices"}, timeout=10)
nats_cmd.reset_mock()
# test successful attempt
salt_api_cmd.return_value = salt_return
nats_cmd.return_value = nats_return
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(timeout=15, func="win_agent.get_services")
self.assertEquals(Agent.objects.get(pk=agent.pk).services, salt_return)
nats_cmd.assert_called_with(data={"func": "winservices"}, timeout=10)
self.assertEquals(Agent.objects.get(pk=agent.pk).services, nats_return)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_service_action(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_service_action(self, nats_cmd):
url = "/services/serviceaction/"
invalid_data = {"pk": 500, "sv_name": "AeLookupSvc", "sv_action": "restart"}
@@ -101,47 +94,37 @@ class TestServiceViews(TacticalTestCase):
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "sv_action": "restart"}
# test failed attempt
salt_api_cmd.return_value = "timeout"
nats_cmd.return_value = "timeout"
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(
timeout=45,
func=f"service.restart",
arg="AeLookupSvc",
nats_cmd.assert_called_with(
{
"func": "winsvcaction",
"payload": {
"name": "AeLookupSvc",
"action": "stop",
},
},
timeout=32,
)
salt_api_cmd.reset_mock()
salt_api_cmd.return_value = "error"
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(
timeout=45,
func=f"service.restart",
arg="AeLookupSvc",
)
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
# test successful attempt
salt_api_cmd.return_value = True
nats_cmd.return_value = {"success": True, "errormsg": ""}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(
timeout=45,
func=f"service.restart",
arg="AeLookupSvc",
)
self.check_not_authenticated("post", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_service_detail(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_service_detail(self, nats_cmd):
# test a call where agent doesn't exist
resp = self.client.get(
"/services/500/doesntexist/servicedetail/", format="json"
)
self.assertEqual(resp.status_code, 404)
salt_return = {
nats_return = {
"pid": 812,
"name": "ALG",
"status": "stopped",
@@ -156,29 +139,27 @@ class TestServiceViews(TacticalTestCase):
url = f"/services/{agent.pk}/alg/servicedetail/"
# test failed attempt
salt_api_cmd.return_value = "timeout"
nats_cmd.return_value = "timeout"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(timeout=20, func="service.info", arg="alg")
salt_api_cmd.reset_mock()
salt_api_cmd.return_value = "error"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(timeout=20, func="service.info", arg="alg")
salt_api_cmd.reset_mock()
nats_cmd.assert_called_with(
{"func": "winsvcdetail", "payload": {"name": "alg"}}, timeout=10
)
nats_cmd.reset_mock()
# test successful attempt
salt_api_cmd.return_value = salt_return
nats_cmd.return_value = nats_return
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(timeout=20, func="service.info", arg="alg")
self.assertEquals(resp.data, salt_return)
nats_cmd.assert_called_with(
{"func": "winsvcdetail", "payload": {"name": "alg"}}, timeout=10
)
self.assertEquals(resp.data, nats_return)
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_edit_service(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_edit_service(self, nats_cmd):
url = "/services/editservice/"
agent = baker.make_recipe("agents.agent_with_services")
@@ -189,64 +170,43 @@ class TestServiceViews(TacticalTestCase):
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "autodelay"}
# test failed attempt
salt_api_cmd.return_value = "timeout"
# test timeout
nats_cmd.return_value = "timeout"
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(
timeout=20,
func="service.modify",
arg=data["sv_name"],
kwargs={"start_type": "auto", "start_delayed": True},
)
salt_api_cmd.reset_mock()
salt_api_cmd.return_value = "error"
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(
timeout=20,
func="service.modify",
arg=data["sv_name"],
kwargs={"start_type": "auto", "start_delayed": True},
)
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
# test successful attempt autodelay
salt_api_cmd.return_value = True
nats_cmd.return_value = {"success": True, "errormsg": ""}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(
timeout=20,
func="service.modify",
arg=data["sv_name"],
kwargs={"start_type": "auto", "start_delayed": True},
nats_cmd.assert_called_with(
{
"func": "editwinsvc",
"payload": {
"name": "AeLookupSvc",
"startType": "autodelay",
},
},
timeout=10,
)
salt_api_cmd.reset_mock()
nats_cmd.reset_mock()
# test successful attempt with auto
# test error message from agent
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "auto"}
salt_api_cmd.return_value = True
nats_cmd.return_value = {
"success": False,
"errormsg": "The parameter is incorrect",
}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(
timeout=20,
func="service.modify",
arg=data["sv_name"],
kwargs={"start_type": "auto", "start_delayed": False},
)
salt_api_cmd.reset_mock()
self.assertEqual(resp.status_code, 400)
nats_cmd.reset_mock()
# test successful attempt with manual
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "manual"}
salt_api_cmd.return_value = True
# test catch all
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "auto"}
nats_cmd.return_value = {"success": False, "errormsg": ""}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(
timeout=20,
func="service.modify",
arg=data["sv_name"],
kwargs={"start_type": "manual"},
)
self.assertEqual(resp.status_code, 400)
self.assertEqual(resp.data, "Something went wrong")
self.check_not_authenticated("post", url)

View File

@@ -1,3 +1,4 @@
import asyncio
from loguru import logger
from rest_framework.response import Response
@@ -30,12 +31,12 @@ def default_services(request):
@api_view()
def get_refreshed_services(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=15, func="win_agent.get_services")
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
r = asyncio.run(agent.nats_cmd(data={"func": "winservices"}, timeout=10))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error" or not r:
return notify_error("Something went wrong")
agent.services = r
agent.save(update_fields=["services"])
@@ -44,64 +45,79 @@ def get_refreshed_services(request, pk):
@api_view(["POST"])
def service_action(request):
data = request.data
pk = data["pk"]
service_name = data["sv_name"]
service_action = data["sv_action"]
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(
timeout=45,
func=f"service.{service_action}",
arg=service_name,
)
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
action = request.data["sv_action"]
data = {
"func": "winsvcaction",
"payload": {
"name": request.data["sv_name"],
},
}
# response struct from agent: {success: bool, errormsg: string}
if action == "restart":
data["payload"]["action"] = "stop"
r = asyncio.run(agent.nats_cmd(data, timeout=32))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif not r["success"] and r["errormsg"]:
return notify_error(r["errormsg"])
elif r["success"]:
data["payload"]["action"] = "start"
r = asyncio.run(agent.nats_cmd(data, timeout=32))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif not r["success"] and r["errormsg"]:
return notify_error(r["errormsg"])
elif r["success"]:
return Response("ok")
else:
data["payload"]["action"] = action
r = asyncio.run(agent.nats_cmd(data, timeout=32))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif not r["success"] and r["errormsg"]:
return notify_error(r["errormsg"])
elif r["success"]:
return Response("ok")
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error" or not r:
return notify_error("Something went wrong")
return Response("ok")
return notify_error("Something went wrong")
@api_view()
def service_detail(request, pk, svcname):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(timeout=20, func="service.info", arg=svcname)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
data = {"func": "winsvcdetail", "payload": {"name": svcname}}
r = asyncio.run(agent.nats_cmd(data, timeout=10))
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error" or not r:
return notify_error("Something went wrong")
return Response(r)
@api_view(["POST"])
def edit_service(request):
data = request.data
pk = data["pk"]
service_name = data["sv_name"]
edit_action = data["edit_action"]
agent = get_object_or_404(Agent, pk=pk)
if edit_action == "autodelay":
kwargs = {"start_type": "auto", "start_delayed": True}
elif edit_action == "auto":
kwargs = {"start_type": "auto", "start_delayed": False}
else:
kwargs = {"start_type": edit_action}
r = agent.salt_api_cmd(
timeout=20,
func="service.modify",
arg=service_name,
kwargs=kwargs,
)
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
data = {
"func": "editwinsvc",
"payload": {
"name": request.data["sv_name"],
"startType": request.data["edit_action"],
},
}
r = asyncio.run(agent.nats_cmd(data, timeout=10))
# response struct from agent: {success: bool, errormsg: string}
if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "error" or not r:
return notify_error("Something went wrong")
elif not r["success"] and r["errormsg"]:
return notify_error(r["errormsg"])
elif r["success"]:
return Response("ok")
return Response("ok")
return notify_error("Something went wrong")

View File

@@ -1,3 +1,4 @@
import asyncio
import string
from time import sleep
from loguru import logger
@@ -89,35 +90,36 @@ def update_chocos():
@app.task
def get_installed_software(pk):
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_cmd(
timeout=30,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
if not agent.has_nats:
logger.error(f"{agent.salt_id} software list only available in agent >= 1.1.0")
return
if r == "timeout" or r == "error":
logger.error(f"Timed out trying to get installed software on {agent.salt_id}")
r = asyncio.run(agent.nats_cmd({"func": "softwarelist"}, timeout=20))
if r == "timeout" or r == "natsdown":
logger.error(f"{agent.salt_id} {r}")
return
printable = set(string.printable)
try:
software = [
sw = []
for s in r:
sw.append(
{
"name": "".join(filter(lambda x: x in printable, k)),
"version": "".join(filter(lambda x: x in printable, v)),
"name": "".join(filter(lambda x: x in printable, s["name"])),
"version": "".join(filter(lambda x: x in printable, s["version"])),
"publisher": "".join(filter(lambda x: x in printable, s["publisher"])),
"install_date": s["install_date"],
"size": s["size"],
"source": s["source"],
"location": s["location"],
"uninstall": s["uninstall"],
}
for k, v in r.items()
]
except Exception as e:
logger.error(f"Unable to get installed software on {agent.salt_id}: {e}")
return
)
if not InstalledSoftware.objects.filter(agent=agent).exists():
InstalledSoftware(agent=agent, software=software).save()
InstalledSoftware(agent=agent, software=sw).save()
else:
s = agent.installedsoftware_set.get()
s.software = software
s = agent.installedsoftware_set.first()
s.software = sw
s.save(update_fields=["software"])
return "ok"

View File

@@ -62,72 +62,6 @@ class TestSoftwareViews(TacticalTestCase):
self.check_not_authenticated("get", url)
@patch("agents.models.Agent.salt_api_cmd")
def test_chocos_refresh(self, salt_api_cmd):
salt_return = {"git": "2.3.4", "docker": "1.0.2"}
# test a call where agent doesn't exist
resp = self.client.get("/software/refresh/500/", format="json")
self.assertEqual(resp.status_code, 404)
agent = baker.make_recipe("agents.agent")
url = f"/software/refresh/{agent.pk}/"
# test failed attempt
salt_api_cmd.return_value = "timeout"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(
timeout=20,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
salt_api_cmd.reset_mock()
salt_api_cmd.return_value = "error"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
salt_api_cmd.assert_called_with(
timeout=20,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
salt_api_cmd.reset_mock()
# test success and created new software object
salt_api_cmd.return_value = salt_return
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(
timeout=20,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
self.assertTrue(InstalledSoftware.objects.filter(agent=agent).exists())
salt_api_cmd.reset_mock()
# test success and updates software object
salt_api_cmd.return_value = salt_return
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
salt_api_cmd.assert_called_with(
timeout=20,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
software = agent.installedsoftware_set.get()
expected = [
{"name": "git", "version": "2.3.4"},
{"name": "docker", "version": "1.0.2"},
]
self.assertTrue(InstalledSoftware.objects.filter(agent=agent).exists())
self.assertEquals(software.software, expected)
self.check_not_authenticated("get", url)
class TestSoftwareTasks(TacticalTestCase):
@patch("agents.models.Agent.salt_api_cmd")
@@ -186,43 +120,57 @@ class TestSoftwareTasks(TacticalTestCase):
salt_api_cmd.assert_any_call(timeout=200, func="chocolatey.list")
self.assertEquals(salt_api_cmd.call_count, 2)
@patch("agents.models.Agent.salt_api_cmd")
def test_get_installed_software(self, salt_api_cmd):
@patch("agents.models.Agent.nats_cmd")
def test_get_installed_software(self, nats_cmd):
from .tasks import get_installed_software
agent = baker.make_recipe("agents.agent")
salt_return = {"git": "2.3.4", "docker": "1.0.2"}
# test failed attempt
salt_api_cmd.return_value = "timeout"
ret = get_installed_software(agent.pk)
self.assertFalse(ret)
salt_api_cmd.assert_called_with(
timeout=30,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
salt_api_cmd.reset_mock()
# test successful attempt
salt_api_cmd.return_value = salt_return
ret = get_installed_software(agent.pk)
self.assertTrue(ret)
salt_api_cmd.assert_called_with(
timeout=30,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
software = agent.installedsoftware_set.get()
expected = [
{"name": "git", "version": "2.3.4"},
{"name": "docker", "version": "1.0.2"},
nats_return = [
{
"name": "Mozilla Maintenance Service",
"size": "336.9 kB",
"source": "",
"version": "73.0.1",
"location": "",
"publisher": "Mozilla",
"uninstall": '"C:\\Program Files (x86)\\Mozilla Maintenance Service\\uninstall.exe"',
"install_date": "0001-01-01 00:00:00 +0000 UTC",
},
{
"name": "OpenVPN 2.4.9-I601-Win10 ",
"size": "8.7 MB",
"source": "",
"version": "2.4.9-I601-Win10",
"location": "C:\\Program Files\\OpenVPN\\",
"publisher": "OpenVPN Technologies, Inc.",
"uninstall": "C:\\Program Files\\OpenVPN\\Uninstall.exe",
"install_date": "0001-01-01 00:00:00 +0000 UTC",
},
{
"name": "Microsoft Office Professional Plus 2019 - en-us",
"size": "0 B",
"source": "",
"version": "16.0.10368.20035",
"location": "C:\\Program Files\\Microsoft Office",
"publisher": "Microsoft Corporation",
"uninstall": '"C:\\Program Files\\Common Files\\Microsoft Shared\\ClickToRun\\OfficeClickToRun.exe" scenario=install scenariosubtype=ARP sourcetype=None productstoremove=ProPlus2019Volume.16_en-us_x-none culture=en-us version.16=16.0',
"install_date": "0001-01-01 00:00:00 +0000 UTC",
},
]
self.assertTrue(InstalledSoftware.objects.filter(agent=agent).exists())
self.assertEquals(software.software, expected)
# test failed attempt
nats_cmd.return_value = "timeout"
ret = get_installed_software(agent.pk)
self.assertFalse(ret)
nats_cmd.assert_called_with({"func": "softwarelist"}, timeout=20)
nats_cmd.reset_mock()
# test successful attempt
nats_cmd.return_value = nats_return
ret = get_installed_software(agent.pk)
self.assertTrue(ret)
nats_cmd.assert_called_with({"func": "softwarelist"}, timeout=20)
@patch("agents.models.Agent.salt_api_cmd")
@patch("software.tasks.get_installed_software.delay")

View File

@@ -1,3 +1,4 @@
import asyncio
import string
from django.shortcuts import get_object_or_404
@@ -41,35 +42,34 @@ def get_installed(request, pk):
@api_view()
def refresh_installed(request, pk):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(
timeout=20,
func="pkg.list_pkgs",
kwargs={"include_components": False, "include_updates": False},
)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
if r == "timeout":
r = asyncio.run(agent.nats_cmd({"func": "softwarelist"}, timeout=15))
if r == "timeout" or r == "natsdown":
return notify_error("Unable to contact the agent")
elif r == "error":
return notify_error("Something went wrong")
printable = set(string.printable)
try:
software = [
sw = []
for s in r:
sw.append(
{
"name": "".join(filter(lambda x: x in printable, k)),
"version": "".join(filter(lambda x: x in printable, v)),
"name": "".join(filter(lambda x: x in printable, s["name"])),
"version": "".join(filter(lambda x: x in printable, s["version"])),
"publisher": "".join(filter(lambda x: x in printable, s["publisher"])),
"install_date": s["install_date"],
"size": s["size"],
"source": s["source"],
"location": s["location"],
"uninstall": s["uninstall"],
}
for k, v in r.items()
]
except Exception:
return notify_error("Something went wrong")
)
if not InstalledSoftware.objects.filter(agent=agent).exists():
InstalledSoftware(agent=agent, software=software).save()
InstalledSoftware(agent=agent, software=sw).save()
else:
s = agent.installedsoftware_set.get()
s.software = software
s = agent.installedsoftware_set.first()
s.software = sw
s.save(update_fields=["software"])
return Response("ok")

View File

@@ -27,15 +27,15 @@ app.conf.beat_schedule = {
},
"auto-approve-win-updates": {
"task": "winupdate.tasks.auto_approve_updates_task",
"schedule": crontab(minute=0, hour="*/8"),
"schedule": crontab(minute=2, hour="*/8"),
},
"install-scheduled-win-updates": {
"task": "winupdate.tasks.check_agent_update_schedule_task",
"schedule": crontab(minute=0, hour="*"),
"schedule": crontab(minute=5, hour="*"),
},
"sync-modules": {
"task": "agents.tasks.batch_sync_modules_task",
"schedule": crontab(minute=40, hour="*/4"),
"schedule": crontab(minute=25, hour="*/4"),
},
"sys-info": {
"task": "agents.tasks.batch_sysinfo_task",
@@ -43,11 +43,11 @@ app.conf.beat_schedule = {
},
"update-salt": {
"task": "agents.tasks.update_salt_minion_task",
"schedule": crontab(minute=30, hour="*/6"),
"schedule": crontab(minute=20, hour="*/6"),
},
"agent-auto-update": {
"task": "agents.tasks.auto_self_agent_update_task",
"schedule": crontab(minute=50, hour="*/3"),
"schedule": crontab(minute=35, hour="*"),
},
}
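The minute values above were staggered, presumably so the background jobs no longer all fire at the top of the hour. For reference, the crontab() entries follow standard cron semantics; a short sketch using the values from the schedule above:
from celery.schedules import crontab
crontab(minute=2, hour="*/8")   # fires at 00:02, 08:02 and 16:02
crontab(minute=5, hour="*")     # fires every hour at :05
crontab(minute=35, hour="*")    # fires every hour at :35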

View File

@@ -37,7 +37,6 @@ if not DEBUG:
)
})
SALT_USERNAME = "changeme"
SALT_PASSWORD = "changeme"
MESH_USERNAME = "changeme"

View File

@@ -1,8 +1,13 @@
import os
from pathlib import Path
from datetime import timedelta
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SCRIPTS_DIR = "/srv/salt/scripts"
DOCKER_BUILD = False
LOG_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/log")
EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
@@ -10,23 +15,25 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
AUTH_USER_MODEL = "accounts.User"
# latest release
TRMM_VERSION = "0.1.2"
TRMM_VERSION = "0.2.1"
# bump this version every time vue code is changed
# to alert users that they need to manually refresh their browser
APP_VER = "0.0.84"
APP_VER = "0.0.91"
# https://github.com/wh1te909/salt
LATEST_SALT_VER = "1.1.0"
# https://github.com/wh1te909/rmmagent
LATEST_AGENT_VER = "1.0.1"
LATEST_AGENT_VER = "1.1.0"
MESH_VER = "0.6.62"
MESH_VER = "0.6.84"
SALT_MASTER_VER = "3002.2"
# for the update script; bump when the venv needs recreating or npm install needs re-running
PIP_VER = "1"
NPM_VER = "1"
PIP_VER = "3"
NPM_VER = "2"
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
@@ -173,6 +180,7 @@ if "TRAVIS" in os.environ:
ADMIN_URL = "abc123456/"
SCRIPTS_DIR = os.path.join(Path(BASE_DIR).parents[1], "scripts")
SALT_USERNAME = "travis"
SALT_PASSWORD = "travis"
MESH_USERNAME = "travis"
@@ -205,6 +213,7 @@ if "AZPIPELINE" in os.environ:
ADMIN_URL = "abc123456/"
SCRIPTS_DIR = os.path.join(Path(BASE_DIR).parents[1], "scripts")
SALT_USERNAME = "pipeline"
SALT_PASSWORD = "pipeline"
MESH_USERNAME = "pipeline"

View File

@@ -1,24 +1,11 @@
import json
import os
import random
import string
from django.test import TestCase, override_settings
from django.utils import timezone as djangotime
from django.conf import settings
from model_bakery import baker
from rest_framework.test import APIClient
from rest_framework.authtoken.models import Token
from accounts.models import User
from agents.models import Agent
from winupdate.models import WinUpdatePolicy
from clients.models import Client, Site
from automation.models import Policy
from core.models import CoreSettings
from checks.models import Check
from autotasks.models import AutomatedTask
from rest_framework.authtoken.models import Token
class TacticalTestCase(TestCase):
@@ -29,6 +16,12 @@ class TacticalTestCase(TestCase):
self.client_setup()
self.client.force_authenticate(user=self.john)
def setup_agent_auth(self, agent):
agent_user = User.objects.create_user(
username=agent.agent_id, password=User.objects.make_random_password(60)
)
Token.objects.create(user=agent_user)
def client_setup(self):
self.client = APIClient()
@@ -51,62 +44,6 @@ class TacticalTestCase(TestCase):
r = switch.get(method)
self.assertEqual(r.status_code, 401)
def agent_setup(self):
self.agent = Agent.objects.create(
operating_system="Windows 10",
plat="windows",
plat_release="windows-Server2019",
hostname="DESKTOP-TEST123",
salt_id="aksdjaskdjs",
local_ip="10.0.25.188",
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
services=[
{
"pid": 880,
"name": "AeLookupSvc",
"status": "stopped",
"binpath": "C:\\Windows\\system32\\svchost.exe -k netsvcs",
"username": "localSystem",
"start_type": "manual",
"description": "Processes application compatibility cache requests for applications as they are launched",
"display_name": "Application Experience",
},
{
"pid": 812,
"name": "ALG",
"status": "stopped",
"binpath": "C:\\Windows\\System32\\alg.exe",
"username": "NT AUTHORITY\\LocalService",
"start_type": "manual",
"description": "Provides support for 3rd party protocol plug-ins for Internet Connection Sharing",
"display_name": "Application Layer Gateway Service",
},
],
public_ip="74.13.24.14",
total_ram=16,
used_ram=33,
disks={
"C:": {
"free": "42.3G",
"used": "17.1G",
"total": "59.5G",
"device": "C:",
"fstype": "NTFS",
"percent": 28,
}
},
boot_time=8173231.4,
logged_in_username="John",
client="Google",
site="Main Office",
monitoring_type="server",
description="Test PC",
mesh_node_id="abcdefghijklmnopAABBCCDD77443355##!!AI%@#$%#*",
last_seen=djangotime.now(),
)
self.update_policy = WinUpdatePolicy.objects.create(agent=self.agent)
def create_checks(self, policy=None, agent=None, script=None):
if not policy and not agent:
@@ -132,136 +69,3 @@ class TacticalTestCase(TestCase):
baker.make_recipe(recipe, policy=policy, agent=agent, script=script)
)
return checks
class BaseTestCase(TestCase):
def setUp(self):
self.john = User(username="john")
self.john.set_password("password")
self.john.save()
self.client = APIClient()
self.client.force_authenticate(user=self.john)
self.coresettings = CoreSettings.objects.create()
self.agent = self.create_agent("DESKTOP-TEST123", "Google", "Main Office")
self.agent_user = User.objects.create_user(
username=self.agent.agent_id, password=User.objects.make_random_password(60)
)
self.agent_token = Token.objects.create(user=self.agent_user)
self.update_policy = WinUpdatePolicy.objects.create(agent=self.agent)
Client.objects.create(client="Google")
Client.objects.create(client="Facebook")
google = Client.objects.get(client="Google")
facebook = Client.objects.get(client="Facebook")
Site.objects.create(client=google, site="Main Office")
Site.objects.create(client=google, site="LA Office")
Site.objects.create(client=google, site="MO Office")
Site.objects.create(client=facebook, site="Main Office")
Site.objects.create(client=facebook, site="NY Office")
self.policy = Policy.objects.create(
name="testpolicy",
desc="my awesome policy",
active=True,
)
self.policy.server_clients.add(google)
self.policy.workstation_clients.add(facebook)
self.agentDiskCheck = Check.objects.create(
agent=self.agent,
check_type="diskspace",
disk="C:",
threshold=41,
fails_b4_alert=4,
)
self.policyDiskCheck = Check.objects.create(
policy=self.policy,
check_type="diskspace",
disk="M:",
threshold=87,
fails_b4_alert=1,
)
self.policyTask = AutomatedTask.objects.create(
policy=self.policy, name="Test Task"
)
def check_not_authenticated(self, method, url):
self.client.logout()
switch = {
"get": self.client.get(url),
"post": self.client.post(url),
"put": self.client.put(url),
"patch": self.client.patch(url),
"delete": self.client.delete(url),
}
r = switch.get(method)
self.assertEqual(r.status_code, 401)
def create_agent(self, hostname, client, site, monitoring_type="server"):
with open(
os.path.join(
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
)
) as f:
wmi_py = json.load(f)
return Agent.objects.create(
operating_system="Windows 10",
plat="windows",
plat_release="windows-Server2019",
hostname=f"{hostname}",
salt_id=self.generate_agent_id(hostname),
local_ip="10.0.25.188",
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
services=[
{
"pid": 880,
"name": "AeLookupSvc",
"status": "stopped",
"binpath": "C:\\Windows\\system32\\svchost.exe -k netsvcs",
"username": "localSystem",
"start_type": "manual",
"description": "Processes application compatibility cache requests for applications as they are launched",
"display_name": "Application Experience",
},
{
"pid": 812,
"name": "ALG",
"status": "stopped",
"binpath": "C:\\Windows\\System32\\alg.exe",
"username": "NT AUTHORITY\\LocalService",
"start_type": "manual",
"description": "Provides support for 3rd party protocol plug-ins for Internet Connection Sharing",
"display_name": "Application Layer Gateway Service",
},
],
public_ip="74.13.24.14",
total_ram=16,
used_ram=33,
disks={
"C:": {
"free": "42.3G",
"used": "17.1G",
"total": "59.5G",
"device": "C:",
"fstype": "NTFS",
"percent": 28,
}
},
boot_time=8173231.4,
logged_in_username="John",
client=f"{client}",
site=f"{site}",
monitoring_type=monitoring_type,
description="Test PC",
mesh_node_id="abcdefghijklmnopAABBCCDD77443355##!!AI%@#$%#*",
last_seen=djangotime.now(),
wmi_detail=wmi_py,
)
def generate_agent_id(self, hostname):
rand = "".join(random.choice(string.ascii_letters) for _ in range(35))
return f"{rand}-{hostname}"

View File

@@ -1,4 +1,44 @@
import json
import os
import subprocess
import tldextract
from django.conf import settings
from rest_framework import status
from rest_framework.response import Response
from agents.models import Agent
notify_error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)
def reload_nats():
users = [{"user": "tacticalrmm", "password": settings.SECRET_KEY}]
agents = Agent.objects.prefetch_related("user").only("pk", "agent_id")
for agent in agents:
users.append({"user": agent.agent_id, "password": agent.user.auth_token.key})
if not settings.DOCKER_BUILD:
tld = tldextract.extract(settings.ALLOWED_HOSTS[0])
domain = tld.domain + "." + tld.suffix
cert_path = f"/etc/letsencrypt/live/{domain}"
else:
cert_path = "/opt/tactical/certs"
config = {
"tls": {
"cert_file": f"{cert_path}/fullchain.pem",
"key_file": f"{cert_path}/privkey.pem",
},
"authorization": {"users": users},
"max_payload": 2048576005,
}
conf = os.path.join(settings.BASE_DIR, "nats-rmm.conf")
with open(conf, "w") as f:
json.dump(config, f)
if not settings.DOCKER_BUILD:
subprocess.run(
["/usr/local/bin/nats-server", "-signal", "reload"], capture_output=True
)
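tldextract is used here to reduce the API hostname in ALLOWED_HOSTS to its registered domain so the Let's Encrypt directory can be located. A short illustration of what it returns, using the sample hostname from the .env example later in this diff:
import tldextract
tld = tldextract.extract("api.example.com")
# tld.subdomain == "api", tld.domain == "example", tld.suffix == "com"
domain = tld.domain + "." + tld.suffix         # "example.com"
cert_path = f"/etc/letsencrypt/live/{domain}"  # where the Let's Encrypt cert is expected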

View File

@@ -14,7 +14,7 @@ class TestWinUpdateViews(TacticalTestCase):
def test_get_winupdates(self):
agent = baker.make_recipe("agents.agent")
winupdates = baker.make("winupdate.WinUpdate", agent=agent, _quantity=4)
baker.make("winupdate.WinUpdate", agent=agent, _quantity=4)
# test a call where agent doesn't exist
resp = self.client.get("/winupdate/500/getwinupdates/", format="json")
@@ -107,9 +107,11 @@ class WinupdateTasks(TacticalTestCase):
def setUp(self):
self.setup_coresettings()
baker.make("clients.Site", site="Default", client__client="Default")
self.online_agents = baker.make_recipe("agents.online_agent", _quantity=2)
self.offline_agent = baker.make_recipe("agents.agent")
site = baker.make("clients.Site")
self.online_agents = baker.make_recipe(
"agents.online_agent", site=site, _quantity=2
)
self.offline_agent = baker.make_recipe("agents.agent", site=site)
@patch("winupdate.tasks.check_for_updates_task.apply_async")
def test_auto_approve_task(self, check_updates_task):

backup.sh (18): Normal file → Executable file
View File

@@ -1,7 +1,7 @@
#!/bin/bash
SCRIPT_VERSION="2"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh'
SCRIPT_VERSION="3"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
@@ -31,11 +31,25 @@ POSTGRES_PW="hunter2"
#####################################################
if [[ "$POSTGRES_USER" == "changeme" || "$POSTGRES_PW" == "hunter2" ]]; then
printf >&2 "${RED}You must change the postgres username/password at the top of this file.${NC}\n"
printf >&2 "${RED}Check the github readme for where to find them.${NC}\n"
exit 1
fi
if [ ! -d /rmmbackups ]; then
sudo mkdir /rmmbackups
sudo chown ${USER}:${USER} /rmmbackups
fi
if [ -d /meshcentral/meshcentral-backup ]; then
rm -f /meshcentral/meshcentral-backup/*
fi
if [ -d /meshcentral/meshcentral-coredumps ]; then
rm -f /meshcentral/meshcentral-coredumps/*
fi
dt_now=$(date '+%Y_%m_%d__%H_%M_%S')
tmp_dir=$(mktemp -d -t tacticalrmm-XXXXXXXXXXXXXXXXXXXXX)
sysd="/etc/systemd/system"

View File

@@ -1,24 +1,21 @@
MESH_HOST=mesh.example.com
MESH_USER=mesh
MESH_PASS=meshpass
EMAIL_USER=admin@example.com
IMAGE_REPO=tacticalrmm/
VERSION=latest
# tactical credentials (Used to login to dashboard)
TRMM_USER=tactical
TRMM_PASS=tactical
# dns settings
APP_HOST=app.example.com
API_HOST=api.example.com
MESH_HOST=mesh.example.com
# mesh settings
MESH_USER=meshcentral
MESH_PASS=meshcentralpass
MONGODB_USER=mongouser
MONGODB_PASSWORD=mongopass
# database settings
POSTGRES_USER=postgres
POSTGRES_PASS=pass
POSTGRES_HOST=db
APP_HOST=app.example.com
API_HOST=api.example.com
REDIS_HOST=redis
SALT_HOST=salt
SALT_USER=saltapi
SALT_PASS=password
ADMIN_URL=admin
DJANGO_SEKRET=secret12341234123412341234
DJANGO_DEBUG=False
POSTGRES_PASS=postgrespass

View File

@@ -1,64 +0,0 @@
user nginx;
worker_processes 1;
error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
sendfile on;
keepalive_timeout 65;
server_tokens off;
upstream tacticalrmm {
server unix:///app/tacticalrmm.sock;
}
server {
listen 80;
#server_name ${API_HOST};
client_max_body_size 300M;
access_log /var/log/nginx/api-access.log;
error_log /var/log/nginx/api-error.log;
location /static/ {
root /app;
}
location /private/ {
internal;
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
alias /app/tacticalrmm/private/;
}
location /saltscripts/ {
internal;
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
alias /srv/salt/scripts/userdefined/;
}
location /builtin/ {
internal;
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
alias /srv/salt/scripts/;
}
location / {
uwsgi_pass tacticalrmm;
include /etc/nginx/uwsgi_params;
uwsgi_read_timeout 9999s;
uwsgi_ignore_client_abort on;
}
}
}
daemon off;

View File

@@ -1,45 +0,0 @@
FROM tiangolo/uwsgi-nginx:python3.8
WORKDIR /app
ARG DJANGO_SEKRET
ARG DJANGO_DEBUG
ARG POSTGRES_USER
ARG POSTGRES_PASS
ARG POSTGRES_HOST
ARG SALT_HOST
ARG SALT_USER
ARG SALT_PASS
ARG REDIS_HOST
ARG MESH_USER
ARG MESH_HOST
ARG MESH_TOKEN_KEY
ARG APP_HOST
ARG API_HOST
ARG ADMIN_URL
EXPOSE 80
RUN apt-get update && apt-get install -y gettext-base wget
COPY ./api/tacticalrmm/requirements.txt .
RUN pip install --upgrade pip
RUN pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
RUN pip install --no-cache-dir -r requirements.txt
RUN wget --no-check-certificate https://golang.org/dl/go1.15.linux-amd64.tar.gz -P /tmp
COPY ./api/tacticalrmm/ .
COPY ./docker/api/prestart.sh .
COPY ./docker/api/uwsgi.ini .
COPY ./docker/api/api.conf /app/api.conf.tmp
RUN envsubst '\$APP_HOST, \$API_HOST' < /app/api.conf.tmp > /app/nginx.conf && \
rm /app/api.conf.tmp
COPY ./docker/api/local_settings.py.keep ./tacticalrmm/local_settings.py.tmp
RUN envsubst < /app/tacticalrmm/local_settings.py.tmp > /app/tacticalrmm/local_settings.py && rm /app/tacticalrmm/local_settings.py.tmp
RUN tar -xzf /tmp/go1.15.linux-amd64.tar.gz -C /tmp && \
mkdir /usr/local/rmmgo && \
mv /tmp/go /usr/local/rmmgo/ && \
rm -rf /tmp/go
RUN /usr/local/rmmgo/go/bin/go get github.com/josephspurrier/goversioninfo/cmd/goversioninfo && \
cp ./api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/ && \
chmod +x /usr/local/bin/goversioninfo

View File

@@ -1,47 +0,0 @@
SECRET_KEY = '${DJANGO_SEKRET}'
ALLOWED_HOSTS = ['${API_HOST}']
ADMIN_URL = "${ADMIN_URL}"
CORS_ORIGIN_WHITELIST = ["https://${APP_HOST}",]
DEBUG = ${DJANGO_DEBUG}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'tacticalrmm',
'USER': '${POSTGRES_USER}',
'PASSWORD': '${POSTGRES_PASS}',
'HOST': '${POSTGRES_HOST}',
'PORT': '5432',
}
}
REST_FRAMEWORK = {
'DATETIME_FORMAT': "%b-%d-%Y - %H:%M",
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'knox.auth.TokenAuthentication',
),
}
if not DEBUG:
REST_FRAMEWORK.update({
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
})
SALT_USERNAME = "${SALT_USER}"
SALT_PASSWORD = "${SALT_PASS}"
MESH_USERNAME = "${MESH_USER}"
MESH_SITE = "https://${MESH_HOST}"
MESH_WS_URL="ws://meshcentral:443"
MESH_TOKEN_KEY = "${MESH_TOKEN_KEY}"
REDIS_HOST = "${REDIS_HOST}"
SALT_HOST = "${SALT_HOST}"

View File

@@ -1,10 +0,0 @@
#! /usr/bin/env bash
sleep 10
python manage.py migrate --no-input
python manage.py collectstatic --no-input
python manage.py initial_db_setup
python manage.py initial_mesh_setup
python manage.py load_chocos
python manage.py fix_salt_key
python manage.py load_community_scripts

View File

@@ -1,14 +0,0 @@
[uwsgi]
logto = /app/tacticalrmm/private/log/uwsgi.log
chdir = /app
wsgi-file = tacticalrmm/wsgi.py
master = true
processes = 4
threads = 2
socket = /app/tacticalrmm.sock
# clear environment on exit
vacuum = true
die-on-term = true
max-requests = 500
max-requests-delta = 1000

View File

@@ -1,2 +0,0 @@
PROD_URL = "https://${API_HOST}"
DEV_URL = "https://${API_HOST}"

View File

@@ -1,16 +0,0 @@
server {
listen 80;
#server_name ${APP_HOST};
charset utf-8;
location / {
root /usr/share/nginx/html;
try_files $uri $uri/ /index.html;
add_header Cache-Control "no-store, no-cache, must-revalidate";
add_header Pragma "no-cache";
}
error_log /var/log/nginx/app-error.log;
access_log /var/log/nginx/app-access.log;
}

View File

@@ -1,19 +0,0 @@
FROM node:12-alpine AS builder
ARG APP_HOST
ARG API_HOST
EXPOSE 80
WORKDIR /home/node
RUN apk add gettext
COPY ./web/package.json .
RUN npm install
COPY ./docker/app/.env.keep /home/.env.tmp
RUN envsubst '\$API_HOST' < /home/.env.tmp > /home/node/.env && rm /home/.env.tmp
COPY ./docker/app/app.conf /home/node/app.conf.tmp
RUN envsubst '\$APP_HOST' < /home/node/app.conf.tmp > /home/node/app.conf
COPY ./web .
RUN npm run build
FROM nginx:alpine
WORKDIR /usr/share/nginx/html
COPY --from=builder /home/node/dist .
COPY --from=builder /home/node/app.conf /etc/nginx/conf.d/default.conf

View File

@@ -0,0 +1,29 @@
FROM node:12-alpine AS builder
WORKDIR /home/node/app
COPY ./web/package.json .
RUN npm install
COPY ./web .
# copy env file to set DOCKER_BUILD to true
RUN echo "DOCKER_BUILD=1" > .env
# modify index.html template to allow injection of js variables at runtime
RUN sed -i '/<\/head>/i <script src="\/env-config.js"><\/script>' src/index.template.html
RUN npm run build
FROM nginx:stable-alpine
ENV PUBLIC_DIR /usr/share/nginx/html
RUN apk add --no-cache bash
SHELL ["/bin/bash", "-c"]
COPY --from=builder /home/node/app/dist/ ${PUBLIC_DIR}
COPY docker/containers/tactical-frontend/entrypoint.sh /docker-entrypoint.d/
RUN chmod +x /docker-entrypoint.d/entrypoint.sh
EXPOSE 80

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bash
#
# https://www.freecodecamp.org/news/how-to-implement-runtime-environment-variables-with-create-react-app-docker-and-nginx-7f9d42a91d70/
#
# Recreate js config file on start
rm -rf ${PUBLIC_DIR}/env-config.js
touch ${PUBLIC_DIR}/env-config.js
# Add runtime base url assignment
echo "window._env_ = {PROD_URL: \"https://${API_HOST}\"}" >> ${PUBLIC_DIR}/env-config.js
nginx_config="$(cat << EOF
server {
listen 80;
charset utf-8;
location / {
root /usr/share/nginx/html;
try_files \$uri \$uri/ /index.html;
add_header Cache-Control "no-store, no-cache, must-revalidate";
add_header Pragma "no-cache";
}
error_log /var/log/nginx/app-error.log;
access_log /var/log/nginx/app-access.log;
}
EOF
)"
echo "${nginx_config}" > /etc/nginx/conf.d/default.conf

View File

@@ -0,0 +1,18 @@
FROM node:12-alpine
WORKDIR /home/node/app
ENV TACTICAL_DIR /opt/tactical
RUN apk add --no-cache bash
SHELL ["/bin/bash", "-c"]
RUN npm install meshcentral@0.6.62
COPY docker/containers/tactical-meshcentral/entrypoint.sh /
RUN chmod +x /entrypoint.sh
EXPOSE 80 443
ENTRYPOINT [ "/entrypoint.sh" ]

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env bash
set -e
: "${MESH_USER:=meshcentral}"
: "${MESH_PASS:=meshcentralpass}"
: "${MONGODB_USER:=mongouser}"
: "${MONGODB_PASSWORD:=mongopass}"
: "${MONGODB_HOST:=tactical-mongodb}"
: "${MONGODB_PORT:=27017}"
: "${NGINX_HOST_IP:=172.20.0.20}"
mkdir -p /home/node/app/meshcentral-data
mkdir -p ${TACTICAL_DIR}/tmp
mesh_config="$(cat << EOF
{
"settings": {
"mongodb": "mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}",
"Cert": "${MESH_HOST}",
"TLSOffload": "${NGINX_HOST_IP}",
"RedirPort": 80,
"WANonly": true,
"Minify": 1,
"Port": 443,
"AllowLoginToken": true,
"AllowFraming": true,
"_AgentPing": 60,
"AgentPong": 300,
"AllowHighQualityDesktop": true,
"MaxInvalidLogin": {
"time": 5,
"count": 5,
"coolofftime": 30
}
},
"domains": {
"": {
"Title": "Tactical RMM",
"Title2": "TacticalRMM",
"NewAccounts": false,
"mstsc": true,
"GeoLocation": true,
"CertUrl": "https://${NGINX_HOST_IP}:443",
"httpheaders": {
"Strict-Transport-Security": "max-age=360000",
"_x-frame-options": "sameorigin",
"Content-Security-Policy": "default-src 'none'; script-src 'self' 'unsafe-inline'; connect-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-src 'self'; media-src 'self'"
}
}
}
}
EOF
)"
echo "${mesh_config}" > /home/node/app/meshcentral-data/config.json
node node_modules/meshcentral --createaccount ${MESH_USER} --pass ${MESH_PASS} --email example@example.com
node node_modules/meshcentral --adminaccount ${MESH_USER}
if [ ! -f "${TACTICAL_DIR}/tmp/mesh_token" ]; then
node node_modules/meshcentral --logintokenkey > ${TACTICAL_DIR}/tmp/mesh_token
fi
# start mesh
node node_modules/meshcentral

View File

@@ -0,0 +1,15 @@
FROM nats:2.1-alpine
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
RUN apk add --no-cache inotify-tools supervisor bash
SHELL ["/bin/bash", "-c"]
COPY docker/containers/tactical-nats/entrypoint.sh /
RUN chmod +x /entrypoint.sh
ENTRYPOINT [ "/entrypoint.sh" ]
EXPOSE 4222

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -e
sleep 15
until [ -f "${TACTICAL_READY_FILE}" ]; do
echo "waiting for init container to finish install or update..."
sleep 10
done
mkdir -p /var/log/supervisor
mkdir -p /etc/supervisor/conf.d
supervisor_config="$(cat << EOF
[supervisord]
nodaemon=true
[include]
files = /etc/supervisor/conf.d/*.conf
[program:nats-server]
command=nats-server -DVV --config "${TACTICAL_DIR}/api/nats-rmm.conf"
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true
[program:config-watcher]
command="inotifywait -m -e close_write ${TACTICAL_DIR}/api/nats-rmm.conf"; | while read events; do "nats-server --signal reload"; done;
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true
EOF
)"
echo "${supervisor_config}" > /etc/supervisor/conf.d/supervisor.conf
# run supervised processes
/usr/bin/supervisord -c /etc/supervisor/conf.d/supervisor.conf

View File

@@ -0,0 +1,12 @@
FROM nginx:stable-alpine
ENV TACTICAL_DIR /opt/tactical
RUN apk add --no-cache openssl bash
SHELL ["/bin/bash", "-c"]
COPY docker/containers/tactical-nginx/entrypoint.sh /docker-entrypoint.d/
RUN chmod +x /docker-entrypoint.d/entrypoint.sh
EXPOSE 443 80

Some files were not shown because too many files have changed in this diff.