Compare commits
170 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 2b3cec06b3 | |
| | 8536754d14 | |
| | 1f36235801 | |
| | a4194b14f9 | |
| | 2dcc629d9d | |
| | 98ddadc6bc | |
| | f6e47b7383 | |
| | f073ddc906 | |
| | 3e00631925 | |
| | 9b7ac58562 | |
| | f242ddd801 | |
| | c129886fe2 | |
| | f577e814cf | |
| | c860a0cedd | |
| | ae7e28e492 | |
| | 90a63234ad | |
| | 14bca52e8f | |
| | 2f3c3361cf | |
| | 4034134055 | |
| | c04f94cb7b | |
| | fd1bbc7925 | |
| | ff69bed394 | |
| | d6e8c5146f | |
| | 9a04cf99d7 | |
| | 86e7c11e71 | |
| | 361cc08faa | |
| | 70dc771052 | |
| | c14873a799 | |
| | bba5abd74b | |
| | a224e79c1f | |
| | c305d98186 | |
| | 7c5a473e71 | |
| | 5e0f5d1eed | |
| | 238b269bc4 | |
| | 0ad121b9d2 | |
| | 7088acd9fd | |
| | e0a900d4b6 | |
| | a0fe2f0c7d | |
| | d5b9bc2f26 | |
| | 584254e6ca | |
| | a2963ed7bb | |
| | 2a3c2e133d | |
| | 3e7dcb2755 | |
| | faeec00b39 | |
| | eeed81392f | |
| | 95dce9e992 | |
| | 502bd2a191 | |
| | 17ac92a9d0 | |
| | ba028cde0c | |
| | 6e751e7a9b | |
| | 948b56d0e6 | |
| | 4bf2dc9ece | |
| | 125823f8ab | |
| | 24d33397e9 | |
| | 2c553825f4 | |
| | 198c485e9a | |
| | 0138505507 | |
| | 5d50dcc600 | |
| | 7bdd8c4626 | |
| | fc82c35f0c | |
| | 426ebad300 | |
| | 1afe61c593 | |
| | c20751829b | |
| | a3b8ee8392 | |
| | 156c0fe7f6 | |
| | 216f7a38cf | |
| | fd04dc10d4 | |
| | d39bdce926 | |
| | c6e01245b0 | |
| | c168ee7ba4 | |
| | 7575253000 | |
| | c28c1efbb1 | |
| | e6aa2c3b78 | |
| | ab7c481f83 | |
| | 84ad1c352d | |
| | e9aad39ac9 | |
| | c3444a87bc | |
| | 67b224b340 | |
| | bded14d36b | |
| | 73fa0b6631 | |
| | 2f07337588 | |
| | da163d44e7 | |
| | 56fbf8ae0c | |
| | 327eb4b39b | |
| | ae7873a7e3 | |
| | 9a5f01813b | |
| | 0605a3b725 | |
| | 09c535f159 | |
| | 7fb11da5df | |
| | 9c9a46499a | |
| | 6fca60261e | |
| | 00537b32ef | |
| | 8636758a90 | |
| | e39dfbd624 | |
| | 6e048b2a12 | |
| | f9657599c2 | |
| | 42ae3bba9b | |
| | 2fd56a4bfe | |
| | 824bcc5603 | |
| | 4fbb613aaa | |
| | 9eb45270f2 | |
| | 75c61c53e8 | |
| | 2688a47436 | |
| | fe3bf4b189 | |
| | 456cb5ebb2 | |
| | 3d91d574b4 | |
| | 54876c5499 | |
| | d256585284 | |
| | bd8f100b43 | |
| | 44f05f2dcc | |
| | 43f7f82bdc | |
| | e902f63211 | |
| | 129f68e194 | |
| | 4b37fe12d7 | |
| | 6de79922c5 | |
| | e1a9791f44 | |
| | 81795f51c6 | |
| | 68dfb11155 | |
| | 39fc1beb89 | |
| | fe0ddec0f9 | |
| | 9b52b4efd9 | |
| | e90e527603 | |
| | a510854741 | |
| | 8935ce4ccf | |
| | f9edc9059a | |
| | db8917a769 | |
| | c2d70cc1c2 | |
| | 3b13c7f9ce | |
| | b7150d8026 | |
| | 041830a7f8 | |
| | a18daf0195 | |
| | 5d3dfceb22 | |
| | c82855e732 | |
| | 956f156018 | |
| | 9b13c35e7f | |
| | bc8e637bba | |
| | f03c28c906 | |
| | e4b1f39fdc | |
| | 4780af910c | |
| | d61ce5c524 | |
| | 20ab151f4d | |
| | 8a7be7543a | |
| | 3f806aec9c | |
| | 6c273b32bb | |
| | b986f9d6ee | |
| | c98cca6b7b | |
| | fbec78ede5 | |
| | c1d9a2d1f1 | |
| | 8a10036f32 | |
| | 924a3aec0e | |
| | 3b3ac31541 | |
| | e0cb2f9d0f | |
| | 549b4edb59 | |
| | 67c912aca2 | |
| | a74dde5d9e | |
| | f7bcd24726 | |
| | 337c900770 | |
| | e83e73ead4 | |
| | 24f6f9b063 | |
| | 5dc999360e | |
| | 9ec2f6b64d | |
| | f970592efe | |
| | 7592c11e99 | |
| | 759b05e137 | |
| | 42ebd9ffce | |
| | bc0fc33966 | |
| | f4aab16e39 | |
| | e91425287c | |
| | f05908f570 | |
| | 8b351edf9c | |
5 changes: .dockerignore (Normal file)
@@ -0,0 +1,5 @@
.git
.cache
**/*.env
**/env
**/node_modules
88 changes: .github/workflows/docker-build-push.yml (vendored, Normal file)
@@ -0,0 +1,88 @@
name: Publish Tactical Docker Images
on:
  push:
    tags:
      - "v*.*.*"
jobs:
  docker:
    name: Build and Push Docker Images
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2

      - name: Get Github Tag
        id: prep
        run: |
          echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and Push Tactical Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest

      - name: Build and Push Tactical MeshCentral Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-meshcentral/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest

      - name: Build and Push Tactical NATS Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-nats/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest

      - name: Build and Push Tactical Salt Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-salt/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-salt:${{ steps.prep.outputs.version }},tacticalrmm/tactical-salt:latest

      - name: Build and Push Tactical Frontend Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-frontend/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest

      - name: Build and Push Tactical Nginx Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-nginx/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-nginx:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nginx:latest
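The "Get Github Tag" prep step above strips the `refs/tags/v` prefix from the pushed tag so the Docker images are tagged with the bare version. A minimal Python sketch of that same extraction, using a made-up example value for `GITHUB_REF`, for readers unfamiliar with the shell parameter expansion:

```python
import os

# Example value only; in the workflow this comes from the pushed git tag.
github_ref = os.environ.get("GITHUB_REF", "refs/tags/v1.1.1")

prefix = "refs/tags/v"
version = github_ref[len(prefix):] if github_ref.startswith(prefix) else github_ref
print(f"tacticalrmm/tactical:{version}")  # -> tacticalrmm/tactical:1.1.1
```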
3 changes: .gitignore (vendored)
@@ -42,3 +42,6 @@ api/tacticalrmm/accounts/management/commands/random_data.py
versioninfo.go
resource.syso
htmlcov/
+docker-compose.dev.yml
+docs/.vuepress/dist
+nats-rmm.conf
43 changes: .travis.yml
@@ -1,43 +0,0 @@
dist: focal

matrix:
  include:
    - language: node_js
      node_js: "12"
      before_install:
        - cd web
      install:
        - npm install
      script:
        - npm run test:unit

    - language: python
      python: "3.8"
      services:
        - redis

      addons:
        postgresql: "13"
        apt:
          packages:
            - postgresql-13

      before_script:
        - psql -c 'CREATE DATABASE travisci;' -U postgres
        - psql -c "CREATE USER travisci WITH PASSWORD 'travisSuperSekret6645';" -U postgres
        - psql -c 'GRANT ALL PRIVILEGES ON DATABASE travisci TO travisci;' -U postgres
        - psql -c 'ALTER USER travisci CREATEDB;' -U postgres

      before_install:
        - cd api/tacticalrmm

      install:
        - pip install --no-cache-dir --upgrade pip
        - pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
        - pip install --no-cache-dir -r requirements.txt -r requirements-test.txt

      script:
        - coverage run manage.py test -v 2

      after_success:
        - coveralls
2 changes: .vscode/settings.json (vendored)
@@ -2,7 +2,7 @@
    "python.pythonPath": "api/tacticalrmm/env/bin/python",
    "python.languageServer": "Pylance",
    "python.analysis.extraPaths": [
-        "api/tacticalrmm"
+        "api/tacticalrmm",
    ],
    "python.analysis.typeCheckingMode": "basic",
    "python.formatting.provider": "black",
25 changes: README.md
@@ -1,6 +1,5 @@
# Tactical RMM

-[](https://travis-ci.com/wh1te909/tacticalrmm)
[](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
[](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
[](https://opensource.org/licenses/MIT)
@@ -64,6 +63,7 @@ sudo ufw allow ssh
sudo ufw allow http
sudo ufw allow https
sudo ufw allow proto tcp from any to any port 4505,4506
+sudo ufw allow proto tcp from any to any port 4222
sudo ufw enable && sudo ufw reload
```

@@ -78,7 +78,7 @@ Create A record ```mesh.tacticalrmm.com``` for meshcentral
Download the install script and run it

```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/install.sh
+wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh
chmod +x install.sh
./install.sh
```
@@ -92,17 +92,17 @@ chmod +x install.sh
From the app's dashboard, choose Agents > Install Agent to generate an installer.

## Updating
-Download and run [update.sh](./update.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh))
+Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh)
```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh
+wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
chmod +x update.sh
./update.sh
```

## Backup
-Download [backup.sh](./backup.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh))
+Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh)
```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh
+wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
```
Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)

@@ -121,7 +121,7 @@ Copy backup file to new server

Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
```
-wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/restore.sh
+wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
```

Run the restore script, passing it the backup tar file as the first argument

@@ -129,14 +129,3 @@ Run the restore script, passing it the backup tar file as the first argument
chmod +x restore.sh
./restore.sh rmm-backup-xxxxxxx.tar
```

-## Using another ssl certificate
-During the install you can opt out of using the Let's Encrypt certificate. If you do this the script will create a self-signed certificate, so that https continues to work. You can replace the certificates in /certs/example.com/(privkey.pem | pubkey.pem) with your own.
-
-If you are migrating from Let's Encrypt to another certificate provider, you can create the /certs directory and copy your certificates there. It is recommended to do this because this directory will be backed up with the backup script provided. Then modify the nginx configurations to use your new certificates
-
-The cert that is generated is a wildcard certificate and is used in the nginx configurations: rmm.conf, api.conf, and mesh.conf. If you can't generate wildcard certificates you can create a cert for each subdomain and configure each nginx configuration file to use its own certificate. Then restart nginx:
-
-```
-sudo systemctl restart nginx
-```
@@ -20,6 +20,5 @@ omit =
    */urls.py
    */tests.py
    */test.py
    api/*.py
    checks/utils.py
@@ -26,6 +26,7 @@ def get_wmi_data():
agent = Recipe(
    Agent,
    hostname="DESKTOP-TEST123",
+   version="1.1.1",
    monitoring_type=cycle(["workstation", "server"]),
    salt_id=generate_agent_id("DESKTOP-TEST123"),
    agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
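The recipe above is consumed by the test suite through model_bakery; a minimal sketch of how a test might instantiate it (the surrounding test setup is illustrative, the recipe and field names come from this changeset):

```python
# Illustrative only; requires a configured Django test environment.
from model_bakery import baker

# cycle() alternates monitoring_type between "workstation" and "server",
# so building two agents exercises both values.
agents = baker.make_recipe("agents.agent", _quantity=2)

# Tests in this changeset also pin the agent version explicitly, e.g.:
online = baker.make_recipe("agents.online_agent", version="1.1.1")
```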
18 changes: api/tacticalrmm/agents/migrations/0025_auto_20201122_0407.py (Normal file)
@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-22 04:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0024_auto_20201101_2319'),
    ]

    operations = [
        migrations.AlterField(
            model_name='recoveryaction',
            name='mode',
            field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC')], default='mesh', max_length=50),
        ),
    ]
18 changes: api/tacticalrmm/agents/migrations/0026_auto_20201125_2334.py (Normal file)
@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-25 23:34

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0025_auto_20201122_0407'),
    ]

    operations = [
        migrations.AlterField(
            model_name='recoveryaction',
            name='mode',
            field=models.CharField(choices=[('salt', 'Salt'), ('mesh', 'Mesh'), ('command', 'Command'), ('rpc', 'Nats RPC'), ('checkrunner', 'Checkrunner')], default='mesh', max_length=50),
        ),
    ]
@@ -1,5 +1,4 @@
import requests
import datetime as dt
import time
import base64
from Crypto.Cipher import AES
@@ -7,13 +6,14 @@ from Crypto.Random import get_random_bytes
from Crypto.Hash import SHA3_384
from Crypto.Util.Padding import pad
import validators
import random
import msgpack
import re
import string
from collections import Counter
from loguru import logger
from packaging import version as pyver
from distutils.version import LooseVersion
from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout

from django.db import models
from django.conf import settings
@@ -82,6 +82,14 @@
    def client(self):
        return self.site.client

+    @property
+    def has_nats(self):
+        return pyver.parse(self.version) >= pyver.parse("1.1.0")
+
+    @property
+    def has_gotasks(self):
+        return pyver.parse(self.version) >= pyver.parse("1.1.1")

    @property
    def timezone(self):
        # return the default timezone unless the timezone is explicity set per agent
@@ -142,11 +150,7 @@
    @property
    def has_patches_pending(self):

-        if self.winupdates.filter(action="approve").filter(installed=False).exists():
-            return True
-        else:
-            return False
+        return self.winupdates.filter(action="approve").filter(installed=False).exists()

    @property
    def checks(self):
@@ -433,6 +437,37 @@
        except Exception:
            return "err"

+    async def nats_cmd(self, data, timeout=30, wait=True):
+        nc = NATS()
+        options = {
+            "servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
+            "user": "tacticalrmm",
+            "password": settings.SECRET_KEY,
+            "connect_timeout": 3,
+            "max_reconnect_attempts": 2,
+        }
+        try:
+            await nc.connect(**options)
+        except:
+            return "natsdown"
+
+        if wait:
+            try:
+                msg = await nc.request(
+                    self.agent_id, msgpack.dumps(data), timeout=timeout
+                )
+            except ErrTimeout:
+                ret = "timeout"
+            else:
+                ret = msgpack.loads(msg.data)
+
+            await nc.close()
+            return ret
+        else:
+            await nc.publish(self.agent_id, msgpack.dumps(data))
+            await nc.flush()
+            await nc.close()

    def salt_api_cmd(self, **kwargs):

        # salt should always timeout first before the requests' timeout
@@ -511,6 +546,7 @@
        ret = AgentEditSerializer(agent).data
        del ret["all_timezones"]
+        del ret["client"]
        return ret

    @staticmethod
@@ -539,64 +575,6 @@
        return resp

-    def schedule_reboot(self, obj):
-
-        start_date = dt.datetime.strftime(obj, "%Y-%m-%d")
-        start_time = dt.datetime.strftime(obj, "%H:%M")
-
-        # let windows task scheduler automatically delete the task after it runs
-        end_obj = obj + dt.timedelta(minutes=15)
-        end_date = dt.datetime.strftime(end_obj, "%Y-%m-%d")
-        end_time = dt.datetime.strftime(end_obj, "%H:%M")
-
-        task_name = "TacticalRMM_SchedReboot_" + "".join(
-            random.choice(string.ascii_letters) for _ in range(10)
-        )
-
-        r = self.salt_api_cmd(
-            timeout=15,
-            func="task.create_task",
-            arg=[
-                f"name={task_name}",
-                "force=True",
-                "action_type=Execute",
-                'cmd="C:\\Windows\\System32\\shutdown.exe"',
-                'arguments="/r /t 5 /f"',
-                "trigger_type=Once",
-                f'start_date="{start_date}"',
-                f'start_time="{start_time}"',
-                f'end_date="{end_date}"',
-                f'end_time="{end_time}"',
-                "ac_only=False",
-                "stop_if_on_batteries=False",
-                "delete_after=Immediately",
-            ],
-        )
-
-        if r == "error" or (isinstance(r, bool) and not r):
-            return "failed"
-        elif r == "timeout":
-            return "timeout"
-        elif isinstance(r, bool) and r:
-            from logs.models import PendingAction
-
-            details = {
-                "taskname": task_name,
-                "time": str(obj),
-            }
-            PendingAction(agent=self, action_type="schedreboot", details=details).save()
-
-            nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
-            return {"msg": {"time": nice_time, "agent": self.hostname}}
-        else:
-            return "failed"
-
-    def not_supported(self, version_added):
-        if pyver.parse(self.version) < pyver.parse(version_added):
-            return True
-
-        return False

    def delete_superseded_updates(self):
        try:
            pks = []  # list of pks to delete
@@ -721,6 +699,8 @@ RECOVERY_CHOICES = [
    ("salt", "Salt"),
    ("mesh", "Mesh"),
    ("command", "Command"),
+    ("rpc", "Nats RPC"),
+    ("checkrunner", "Checkrunner"),
]
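The new `nats_cmd` coroutine is what the REST views call from synchronous Django code. A minimal usage sketch, assuming an `agent` instance has already been loaded; both payloads mirror calls that appear elsewhere in this changeset:

```python
import asyncio

# Request/reply: wait up to 5 seconds for the agent to answer. Returns the
# msgpack-decoded response, "timeout" on no reply, or "natsdown" if the
# NATS server itself is unreachable.
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
online = r == "pong"

# Fire-and-forget: publish the command and return without waiting for a reply.
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
```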
@@ -36,12 +36,16 @@ class AgentSerializer(serializers.ModelSerializer):

class AgentTableSerializer(serializers.ModelSerializer):
    patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
+    pending_actions = serializers.SerializerMethodField()
    status = serializers.ReadOnlyField()
    checks = serializers.ReadOnlyField()
    last_seen = serializers.SerializerMethodField()
    client_name = serializers.ReadOnlyField(source="client.name")
    site_name = serializers.ReadOnlyField(source="site.name")

+    def get_pending_actions(self, obj):
+        return obj.pendingactions.filter(status="pending").count()

    def get_last_seen(self, obj):
        if obj.time_zone is not None:
            agent_tz = pytz.timezone(obj.time_zone)
@@ -62,6 +66,7 @@ class AgentTableSerializer(serializers.ModelSerializer):
            "description",
            "needs_reboot",
            "patches_pending",
+            "pending_actions",
            "status",
            "overdue_text_alert",
            "overdue_email_alert",
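Because `pending_actions` is a `SerializerMethodField`, DRF resolves it by calling `get_pending_actions` once per agent when the table is serialized. A small sketch of how that table data would be produced (the module paths and queryset are assumptions for illustration, not part of this diff):

```python
# Illustrative only; module paths are assumed from the app layout.
from agents.models import Agent
from agents.serializers import AgentTableSerializer

agents = Agent.objects.all()
rows = AgentTableSerializer(agents, many=True).data
# Each row now carries "pending_actions": the count of that agent's
# PendingAction records with status="pending".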
@@ -1,16 +1,16 @@
|
||||
import asyncio
|
||||
from loguru import logger
|
||||
from time import sleep
|
||||
import random
|
||||
import requests
|
||||
from packaging import version as pyver
|
||||
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
from tacticalrmm.celery import app
|
||||
from agents.models import Agent, AgentOutage
|
||||
from core.models import CoreSettings
|
||||
from logs.models import PendingAction
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@@ -50,18 +50,41 @@ def send_agent_update_task(pks, version):
|
||||
else:
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
logger.info(
|
||||
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
|
||||
)
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
"url": url,
|
||||
},
|
||||
)
|
||||
logger.info(f"{agent.salt_id}: {r}")
|
||||
sleep(10)
|
||||
|
||||
if agent.has_nats:
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
action = agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
if pyver.parse(action.details["version"]) < pyver.parse(
|
||||
settings.LATEST_AGENT_VER
|
||||
):
|
||||
action.delete()
|
||||
else:
|
||||
continue
|
||||
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": agent.winagent_dl,
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"inno": agent.win_inno_exe,
|
||||
},
|
||||
)
|
||||
# TODO
|
||||
# Salt is deprecated, remove this once salt is gone
|
||||
else:
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
"url": url,
|
||||
},
|
||||
)
|
||||
sleep(5)
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -77,7 +100,6 @@ def auto_self_agent_update_task():
|
||||
for i in q
|
||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
]
|
||||
logger.info(f"Updating {len(agents)}")
|
||||
|
||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
||||
|
||||
@@ -104,44 +126,53 @@ def auto_self_agent_update_task():
|
||||
else:
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
logger.info(
|
||||
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
|
||||
)
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
"url": url,
|
||||
},
|
||||
)
|
||||
logger.info(f"{agent.salt_id}: {r}")
|
||||
sleep(10)
|
||||
|
||||
if agent.has_nats:
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
action = agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
if pyver.parse(action.details["version"]) < pyver.parse(
|
||||
settings.LATEST_AGENT_VER
|
||||
):
|
||||
action.delete()
|
||||
else:
|
||||
continue
|
||||
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": agent.winagent_dl,
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"inno": agent.win_inno_exe,
|
||||
},
|
||||
)
|
||||
# TODO
|
||||
# Salt is deprecated, remove this once salt is gone
|
||||
else:
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
"url": url,
|
||||
},
|
||||
)
|
||||
sleep(5)
|
||||
|
||||
|
||||
@app.task
|
||||
def update_salt_minion_task():
|
||||
q = Agent.objects.all()
|
||||
agents = [
|
||||
i.pk
|
||||
for i in q
|
||||
if pyver.parse(i.version) >= pyver.parse("0.11.0")
|
||||
and pyver.parse(i.salt_ver) < pyver.parse(settings.LATEST_SALT_VER)
|
||||
def sync_sysinfo_task():
|
||||
agents = Agent.objects.all()
|
||||
online = [
|
||||
i
|
||||
for i in agents
|
||||
if pyver.parse(i.version) >= pyver.parse("1.1.3") and i.status == "online"
|
||||
]
|
||||
|
||||
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
|
||||
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_async(func="win_agent.update_salt")
|
||||
sleep(20)
|
||||
|
||||
|
||||
@app.task
|
||||
def get_wmi_detail_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")
|
||||
return "ok"
|
||||
for agent in online:
|
||||
asyncio.run(agent.nats_cmd({"func": "sync"}, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -160,7 +191,7 @@ def sync_salt_modules_task(pk):
|
||||
def batch_sync_modules_task():
|
||||
# sync modules, split into chunks of 50 agents to not overload salt
|
||||
agents = Agent.objects.all()
|
||||
online = [i.salt_id for i in agents if i.status == "online"]
|
||||
online = [i.salt_id for i in agents]
|
||||
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
|
||||
for chunk in chunks:
|
||||
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
|
||||
@@ -168,57 +199,43 @@ def batch_sync_modules_task():
|
||||
|
||||
|
||||
@app.task
|
||||
def batch_sysinfo_task():
|
||||
# update system info using WMI
|
||||
agents = Agent.objects.all()
|
||||
online = [
|
||||
i.salt_id
|
||||
for i in agents
|
||||
if not i.not_supported("0.11.0") and i.status == "online"
|
||||
]
|
||||
chunks = (online[i : i + 30] for i in range(0, len(online), 30))
|
||||
for chunk in chunks:
|
||||
Agent.salt_batch_async(minions=chunk, func="win_agent.local_sys_info")
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def uninstall_agent_task(salt_id):
|
||||
def uninstall_agent_task(salt_id, has_nats):
|
||||
attempts = 0
|
||||
error = False
|
||||
|
||||
while 1:
|
||||
try:
|
||||
if not has_nats:
|
||||
while 1:
|
||||
try:
|
||||
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "local",
|
||||
"tgt": salt_id,
|
||||
"fun": "win_agent.uninstall_agent",
|
||||
"timeout": 8,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=10,
|
||||
)
|
||||
ret = r.json()["return"][0][salt_id]
|
||||
except Exception:
|
||||
attempts += 1
|
||||
else:
|
||||
if ret != "ok":
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "local",
|
||||
"tgt": salt_id,
|
||||
"fun": "win_agent.uninstall_agent",
|
||||
"timeout": 8,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=10,
|
||||
)
|
||||
ret = r.json()["return"][0][salt_id]
|
||||
except Exception:
|
||||
attempts += 1
|
||||
else:
|
||||
attempts = 0
|
||||
if ret != "ok":
|
||||
attempts += 1
|
||||
else:
|
||||
attempts = 0
|
||||
|
||||
if attempts >= 10:
|
||||
error = True
|
||||
break
|
||||
elif attempts == 0:
|
||||
break
|
||||
if attempts >= 10:
|
||||
error = True
|
||||
break
|
||||
elif attempts == 0:
|
||||
break
|
||||
|
||||
if error:
|
||||
logger.error(f"{salt_id} uninstall failed")
|
||||
@@ -284,19 +301,22 @@ def agent_recovery_sms_task(pk):
|
||||
|
||||
@app.task
|
||||
def agent_outages_task():
|
||||
agents = Agent.objects.only("pk")
|
||||
agents = Agent.objects.only(
|
||||
"pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert"
|
||||
)
|
||||
|
||||
for agent in agents:
|
||||
if agent.status == "overdue":
|
||||
outages = AgentOutage.objects.filter(agent=agent)
|
||||
if outages and outages.last().is_active:
|
||||
continue
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
outages = AgentOutage.objects.filter(agent=agent)
|
||||
if outages and outages.last().is_active:
|
||||
continue
|
||||
|
||||
outage = AgentOutage(agent=agent)
|
||||
outage.save()
|
||||
outage = AgentOutage(agent=agent)
|
||||
outage.save()
|
||||
|
||||
if agent.overdue_email_alert and not agent.maintenance_mode:
|
||||
agent_outage_email_task.delay(pk=outage.pk)
|
||||
if agent.overdue_email_alert and not agent.maintenance_mode:
|
||||
agent_outage_email_task.delay(pk=outage.pk)
|
||||
|
||||
if agent.overdue_text_alert and not agent.maintenance_mode:
|
||||
agent_outage_sms_task.delay(pk=outage.pk)
|
||||
if agent.overdue_text_alert and not agent.maintenance_mode:
|
||||
agent_outage_sms_task.delay(pk=outage.pk)
|
||||
|
||||
@@ -7,20 +7,15 @@ from itertools import cycle
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
from accounts.models import User
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from .serializers import AgentSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from .models import Agent
|
||||
from .tasks import (
|
||||
auto_self_agent_update_task,
|
||||
update_salt_minion_task,
|
||||
get_wmi_detail_task,
|
||||
sync_salt_modules_task,
|
||||
batch_sync_modules_task,
|
||||
batch_sysinfo_task,
|
||||
OLD_64_PY_AGENT,
|
||||
OLD_32_PY_AGENT,
|
||||
)
|
||||
@@ -34,7 +29,9 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
client = baker.make("clients.Client", name="Google")
|
||||
site = baker.make("clients.Site", client=client, name="LA Office")
|
||||
self.agent = baker.make_recipe("agents.online_agent", site=site)
|
||||
self.agent = baker.make_recipe(
|
||||
"agents.online_agent", site=site, version="1.1.1"
|
||||
)
|
||||
baker.make_recipe("winupdate.winupdate_policy", agent=self.agent)
|
||||
|
||||
def test_get_patch_policy(self):
|
||||
@@ -81,29 +78,29 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_ping(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_ping(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/ping/"
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "natsdown"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "pong"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "online"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "asdasjdaksdasd"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
@@ -111,39 +108,23 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
||||
def test_uninstall(self, mock_task):
|
||||
@patch("agents.views.reload_nats")
|
||||
def test_uninstall(self, reload_nats, mock_task, nats_cmd):
|
||||
url = "/agents/uninstall/"
|
||||
data = {"pk": self.agent.pk}
|
||||
|
||||
r = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_task.assert_called_with(self.agent.salt_id)
|
||||
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
|
||||
reload_nats.assert_called_once()
|
||||
mock_task.assert_called_with(self.agent.salt_id, True)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
||||
def test_uninstall_catch_no_user(self, mock_task):
|
||||
# setup data
|
||||
agent_user = User.objects.create_user(
|
||||
username=self.agent.agent_id, password=User.objects.make_random_password(60)
|
||||
)
|
||||
agent_token = Token.objects.create(user=agent_user)
|
||||
|
||||
url = "/agents/uninstall/"
|
||||
data = {"pk": self.agent.pk}
|
||||
|
||||
agent_user.delete()
|
||||
|
||||
r = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_task.assert_called_with(self.agent.salt_id)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_processes(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/getprocs/"
|
||||
|
||||
@@ -163,82 +144,61 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_kill_proc(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_kill_proc(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/8234/killproc/"
|
||||
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "process doesn't exist"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_event_log(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/eventlograw.json")
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
|
||||
) as f:
|
||||
mock_ret.return_value = json.load(f)
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
|
||||
) as f:
|
||||
decoded = json.load(f)
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(decoded, r.json())
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_power_action(self, mock_ret):
|
||||
url = f"/agents/poweraction/"
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_reboot_now(self, nats_cmd):
|
||||
url = f"/agents/reboot/"
|
||||
|
||||
data = {"pk": self.agent.pk, "action": "rebootnow"}
|
||||
mock_ret.return_value = True
|
||||
data = {"pk": self.agent.pk}
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "rebootnow"}, timeout=10)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_send_raw_cmd(self, mock_ret):
|
||||
url = f"/agents/sendrawcmd/"
|
||||
|
||||
@@ -257,36 +217,39 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_reboot_later(self, mock_ret):
|
||||
url = f"/agents/rebootlater/"
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_reboot_later(self, nats_cmd):
|
||||
url = f"/agents/reboot/"
|
||||
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"datetime": "2025-08-29 18:41",
|
||||
}
|
||||
|
||||
mock_ret.return_value = True
|
||||
r = self.client.post(url, data, format="json")
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
|
||||
self.assertEqual(r.data["agent"], self.agent.hostname)
|
||||
|
||||
mock_ret.return_value = "failed"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"trigger": "once",
|
||||
"name": r.data["task_name"],
|
||||
"year": 2025,
|
||||
"month": "August",
|
||||
"day": 29,
|
||||
"hour": 18,
|
||||
"min": 41,
|
||||
},
|
||||
}
|
||||
nats_cmd.assert_called_with(nats_data, timeout=10)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "error creating task"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@@ -294,12 +257,12 @@ class TestAgentViews(TacticalTestCase):
|
||||
"pk": self.agent.pk,
|
||||
"datetime": "rm -rf /",
|
||||
}
|
||||
r = self.client.post(url, data_invalid, format="json")
|
||||
r = self.client.patch(url, data_invalid, format="json")
|
||||
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "Invalid date")
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("os.path.exists")
|
||||
@patch("subprocess.run")
|
||||
@@ -469,7 +432,14 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.assertIn("&viewmode=13", r.data["file"])
|
||||
self.assertIn("&viewmode=12", r.data["terminal"])
|
||||
self.assertIn("&viewmode=11", r.data["control"])
|
||||
self.assertIn("mstsc.html?login=", r.data["webrdp"])
|
||||
|
||||
self.assertIn("&gotonode=", r.data["file"])
|
||||
self.assertIn("&gotonode=", r.data["terminal"])
|
||||
self.assertIn("&gotonode=", r.data["control"])
|
||||
|
||||
self.assertIn("?login=", r.data["file"])
|
||||
self.assertIn("?login=", r.data["terminal"])
|
||||
self.assertIn("?login=", r.data["control"])
|
||||
|
||||
self.assertEqual(self.agent.hostname, r.data["hostname"])
|
||||
self.assertEqual(self.agent.client.name, r.data["client"])
|
||||
@@ -569,12 +539,14 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_script_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_command_task.delay")
|
||||
@patch("agents.models.Agent.salt_batch_async")
|
||||
def test_bulk_cmd_script(self, mock_ret, mock_update):
|
||||
def test_bulk_cmd_script(
|
||||
self, salt_batch_async, bulk_command, bulk_script, mock_update
|
||||
):
|
||||
url = "/agents/bulk/"
|
||||
|
||||
mock_ret.return_value = "ok"
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"target": "agents",
|
||||
@@ -589,6 +561,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
}
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload = {
|
||||
@@ -620,6 +593,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
@@ -636,12 +610,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
payload["client"] = self.agent.client.id
|
||||
payload["site"] = self.agent.site.id
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
|
||||
payload = {
|
||||
"mode": "scan",
|
||||
@@ -652,9 +621,8 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.agent.pk,
|
||||
],
|
||||
}
|
||||
mock_ret.return_value = "ok"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
mock_update.assert_called_once()
|
||||
mock_update.assert_called_with(minions=[self.agent.salt_id])
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload = {
|
||||
@@ -666,6 +634,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.agent.pk,
|
||||
],
|
||||
}
|
||||
salt_batch_async.return_value = "ok"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -681,41 +650,18 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_restart_mesh(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/restartmesh/"
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = True
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_recover_mesh(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_recover_mesh(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/recovermesh/"
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn(self.agent.hostname, r.data)
|
||||
nats_cmd.assert_called_with(
|
||||
{"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
|
||||
)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@@ -804,13 +750,6 @@ class TestAgentTasks(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async", return_value=None)
|
||||
def test_get_wmi_detail_task(self, salt_api_async):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
ret = get_wmi_detail_task.s(self.agent.pk).apply()
|
||||
salt_api_async.assert_called_with(timeout=30, func="win_agent.local_sys_info")
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_sync_salt_modules_task(self, salt_api_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
@@ -833,7 +772,7 @@ class TestAgentTasks(TacticalTestCase):
|
||||
@patch("agents.models.Agent.salt_batch_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
|
||||
# chunks of 50, 60 online should run only 2 times
|
||||
# chunks of 50, should run 4 times
|
||||
baker.make_recipe(
|
||||
"agents.online_agent", last_seen=djangotime.now(), _quantity=60
|
||||
)
|
||||
@@ -843,77 +782,9 @@ class TestAgentTasks(TacticalTestCase):
|
||||
_quantity=115,
|
||||
)
|
||||
ret = batch_sync_modules_task.s().apply()
|
||||
self.assertEqual(salt_batch_async.call_count, 2)
|
||||
self.assertEqual(salt_batch_async.call_count, 4)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_batch_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_batch_sysinfo_task(self, mock_sleep, salt_batch_async):
|
||||
# chunks of 30, 70 online should run only 3 times
|
||||
self.online = baker.make_recipe(
|
||||
"agents.online_agent", version=settings.LATEST_AGENT_VER, _quantity=70
|
||||
)
|
||||
self.overdue = baker.make_recipe(
|
||||
"agents.overdue_agent", version=settings.LATEST_AGENT_VER, _quantity=115
|
||||
)
|
||||
ret = batch_sysinfo_task.s().apply()
|
||||
self.assertEqual(salt_batch_async.call_count, 3)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
salt_batch_async.reset_mock()
|
||||
[i.delete() for i in self.online]
|
||||
[i.delete() for i in self.overdue]
|
||||
|
||||
# test old agents, should not run
|
||||
self.online_old = baker.make_recipe(
|
||||
"agents.online_agent", version="0.10.2", _quantity=70
|
||||
)
|
||||
self.overdue_old = baker.make_recipe(
|
||||
"agents.overdue_agent", version="0.10.2", _quantity=115
|
||||
)
|
||||
ret = batch_sysinfo_task.s().apply()
|
||||
salt_batch_async.assert_not_called()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_update_salt_minion_task(self, mock_sleep, salt_api_async):
|
||||
# test agents that need salt update
|
||||
self.agents = baker.make_recipe(
|
||||
"agents.agent",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
salt_ver="1.0.3",
|
||||
_quantity=53,
|
||||
)
|
||||
ret = update_salt_minion_task.s().apply()
|
||||
self.assertEqual(salt_api_async.call_count, 53)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
[i.delete() for i in self.agents]
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test agents that need salt update but agent version too low
|
||||
self.agents = baker.make_recipe(
|
||||
"agents.agent",
|
||||
version="0.10.2",
|
||||
salt_ver="1.0.3",
|
||||
_quantity=53,
|
||||
)
|
||||
ret = update_salt_minion_task.s().apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
salt_api_async.assert_not_called()
|
||||
[i.delete() for i in self.agents]
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test agents already on latest salt ver
|
||||
self.agents = baker.make_recipe(
|
||||
"agents.agent",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
salt_ver=settings.LATEST_SALT_VER,
|
||||
_quantity=53,
|
||||
)
|
||||
ret = update_salt_minion_task.s().apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
salt_api_async.assert_not_called()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async")
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
||||
|
||||
@@ -12,7 +12,6 @@ urlpatterns = [
    path("<pk>/agentdetail/", views.agent_detail),
    path("<int:pk>/meshcentral/", views.meshcentral),
    path("<str:arch>/getmeshexe/", views.get_mesh_exe),
-    path("poweraction/", views.power_action),
    path("uninstall/", views.uninstall),
    path("editagent/", views.edit_agent),
    path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
@@ -20,16 +19,16 @@ urlpatterns = [
    path("updateagents/", views.update_agents),
    path("<pk>/getprocs/", views.get_processes),
    path("<pk>/<pid>/killproc/", views.kill_proc),
-    path("rebootlater/", views.reboot_later),
+    path("reboot/", views.Reboot.as_view()),
    path("installagent/", views.install_agent),
    path("<int:pk>/ping/", views.ping),
    path("recover/", views.recover),
    path("runscript/", views.run_script),
-    path("<int:pk>/restartmesh/", views.restart_mesh),
    path("<int:pk>/recovermesh/", views.recover_mesh),
    path("<int:pk>/notes/", views.GetAddNotes.as_view()),
    path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
    path("bulk/", views.bulk),
    path("agent_counts/", views.agent_counts),
    path("maintenance/", views.agent_maintenance),
+    path("<int:pk>/wmi/", views.WMI.as_view()),
]
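With these routes, the old `poweraction/` and `rebootlater/` endpoints are folded into a single `reboot/` URL backed by the class-based `Reboot` view: judging from the tests in this changeset, POST reboots immediately and PATCH schedules a reboot for later. A hedged sketch of the client-side calls, using the DRF test client purely for illustration (the `pk` value is made up):

```python
from rest_framework.test import APIClient

client = APIClient()  # real requests would also need authentication

# Reboot now.
client.post("/agents/reboot/", {"pk": 1}, format="json")

# Reboot later; the datetime format matches the tests above.
client.patch(
    "/agents/reboot/",
    {"pk": 1, "datetime": "2025-08-29 18:41"},
    format="json",
)
```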
@@ -1,10 +1,10 @@
|
||||
import asyncio
|
||||
from loguru import logger
|
||||
import os
|
||||
import subprocess
|
||||
import zlib
|
||||
import json
|
||||
import base64
|
||||
import pytz
|
||||
import random
|
||||
import string
|
||||
import datetime as dt
|
||||
from packaging import version as pyver
|
||||
|
||||
@@ -18,12 +18,9 @@ from rest_framework.response import Response
|
||||
from rest_framework import status, generics
|
||||
|
||||
from .models import Agent, AgentOutage, RecoveryAction, Note
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from clients.models import Client, Site
|
||||
from accounts.models import User
|
||||
from core.models import CoreSettings
|
||||
from scripts.models import Script
|
||||
from logs.models import AuditLog
|
||||
from logs.models import AuditLog, PendingAction
|
||||
|
||||
from .serializers import (
|
||||
AgentSerializer,
|
||||
@@ -37,9 +34,9 @@ from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .tasks import uninstall_agent_task, send_agent_update_task
|
||||
from winupdate.tasks import bulk_check_for_updates_task
|
||||
from scripts.tasks import run_script_bg_task, run_bulk_script_task
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
from tacticalrmm.utils import notify_error, reload_nats
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@@ -66,32 +63,40 @@ def update_agents(request):
|
||||
@api_view()
|
||||
def ping(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=5, func="test.ping")
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
return Response({"name": agent.hostname, "status": "offline"})
|
||||
|
||||
if isinstance(r, bool) and r:
|
||||
return Response({"name": agent.hostname, "status": "online"})
|
||||
status = "offline"
|
||||
if agent.has_nats:
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
else:
|
||||
return Response({"name": agent.hostname, "status": "offline"})
|
||||
r = agent.salt_api_cmd(timeout=5, func="test.ping")
|
||||
if isinstance(r, bool) and r:
|
||||
status = "online"
|
||||
|
||||
return Response({"name": agent.hostname, "status": status})
|
||||
|
||||
|
||||
@api_view(["DELETE"])
|
||||
def uninstall(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
|
||||
salt_id = agent.salt_id
|
||||
name = agent.hostname
|
||||
has_nats = agent.has_nats
|
||||
agent.delete()
|
||||
reload_nats()
|
||||
|
||||
uninstall_agent_task.delay(salt_id)
|
||||
uninstall_agent_task.delay(salt_id, has_nats)
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
def edit_agent(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["id"])
|
||||
|
||||
old_site = agent.site.pk
|
||||
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
a_serializer.is_valid(raise_exception=True)
|
||||
a_serializer.save()
|
||||
@@ -103,6 +108,11 @@ def edit_agent(request):
|
||||
p_serializer.is_valid(raise_exception=True)
|
||||
p_serializer.save()
|
||||
|
||||
# check if site changed and initiate generating correct policies
|
||||
if old_site != request.data["site"]:
|
||||
agent.generate_checks_from_policies(clear=True)
|
||||
agent.generate_tasks_from_policies(clear=True)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -118,16 +128,9 @@ def meshcentral(request, pk):
|
||||
if token == "err":
|
||||
return notify_error("Invalid mesh token")
|
||||
|
||||
control = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
)
|
||||
terminal = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
)
|
||||
file = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
)
|
||||
webrdp = f"{core.mesh_site}/mstsc.html?login={token}&node={agent.mesh_node_id}"
|
||||
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
|
||||
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
|
||||
|
||||
@@ -136,7 +139,6 @@ def meshcentral(request, pk):
|
||||
"control": control,
|
||||
"terminal": terminal,
|
||||
"file": file,
|
||||
"webrdp": webrdp,
|
||||
"status": agent.status,
|
||||
"client": agent.client.name,
|
||||
"site": agent.site.name,
|
||||
@@ -153,12 +155,11 @@ def agent_detail(request, pk):
|
||||
@api_view()
|
||||
def get_processes(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=20, func="win_agent.get_procs")
|
||||
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
return Response(r)
|
||||
|
||||
@@ -166,15 +167,17 @@ def get_processes(request, pk):
|
||||
@api_view()
|
||||
def kill_proc(request, pk, pid):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=25, func="ps.kill_pid", arg=int(pid))
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
r = asyncio.run(
|
||||
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
|
||||
)
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
if isinstance(r, bool) and not r:
|
||||
return notify_error("Unable to kill the process")
|
||||
elif r != "ok":
|
||||
return notify_error(r)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -182,55 +185,41 @@ def kill_proc(request, pk, pid):
@api_view()
def get_event_log(request, pk, logtype, days):
agent = get_object_or_404(Agent, pk=pk)
r = agent.salt_api_cmd(
timeout=30,
func="win_agent.get_eventlog",
arg=[logtype, int(days)],
)

if r == "timeout" or r == "error":
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
data = {
"func": "eventlog",
"timeout": 30,
"payload": {
"logname": logtype,
"days": str(days),
},
}
r = asyncio.run(agent.nats_cmd(data, timeout=32))
if r == "timeout":
return notify_error("Unable to contact the agent")

return Response(json.loads(zlib.decompress(base64.b64decode(r["wineventlog"]))))
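Note on the new eventlog handler above: the agent returns the Windows event log as a base64-encoded, zlib-compressed JSON blob, which the final return line unpacks in one expression. A minimal standalone sketch of that decode path, using only the standard library (the helper name and the sample payload are illustrative, not part of this codebase):

import base64
import json
import zlib

def decode_wineventlog(b64_blob):
    # reverse of the agent's encoding: base64 -> zlib -> JSON
    compressed = base64.b64decode(b64_blob)
    raw = zlib.decompress(compressed)
    return json.loads(raw)

# illustrative round trip with a fake payload
payload = base64.b64encode(zlib.compress(json.dumps([{"eventType": "INFO"}]).encode())).decode()
assert decode_wineventlog(payload) == [{"eventType": "INFO"}]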
@api_view(["POST"])
|
||||
def power_action(request):
|
||||
pk = request.data["pk"]
|
||||
action = request.data["action"]
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if action == "rebootnow":
|
||||
logger.info(f"{agent.hostname} was scheduled for immediate reboot")
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=30,
|
||||
func="system.reboot",
|
||||
arg=3,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response("ok")
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def send_raw_cmd(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=request.data["timeout"],
|
||||
func="cmd.run",
|
||||
kwargs={
|
||||
"cmd": request.data["cmd"],
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = int(request.data["timeout"])
|
||||
data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"command": request.data["cmd"],
|
||||
"shell": request.data["shell"],
|
||||
"timeout": request.data["timeout"],
|
||||
},
|
||||
)
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error" or not r:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
AuditLog.audit_raw_command(
|
||||
username=request.user.username,
|
||||
@@ -239,7 +228,6 @@ def send_raw_cmd(request):
|
||||
shell=request.data["shell"],
|
||||
)
|
||||
|
||||
logger.info(f"The command {request.data['cmd']} was sent on agent {agent.hostname}")
|
||||
return Response(r)
|
||||
|
||||
|
||||
@@ -372,24 +360,63 @@ def overdue_action(request):
return Response(agent.hostname)


@api_view(["POST"])
def reboot_later(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
date_time = request.data["datetime"]
class Reboot(APIView):
# reboot now
def post(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")

try:
obj = dt.datetime.strptime(date_time, "%Y-%m-%d %H:%M")
except Exception:
return notify_error("Invalid date")
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
if r != "ok":
return notify_error("Unable to contact the agent")

r = agent.schedule_reboot(obj)
return Response("ok")

if r == "timeout":
return notify_error("Unable to contact the agent")
elif r == "failed":
return notify_error("Something went wrong")
# reboot later
def patch(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_gotasks:
return notify_error("Requires agent version 1.1.1 or greater")

return Response(r["msg"])
try:
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
except Exception:
return notify_error("Invalid date")

task_name = "TacticalRMM_SchedReboot_" + "".join(
random.choice(string.ascii_letters) for _ in range(10)
)

nats_data = {
"func": "schedtask",
"schedtaskpayload": {
"type": "schedreboot",
"trigger": "once",
"name": task_name,
"year": int(dt.datetime.strftime(obj, "%Y")),
"month": dt.datetime.strftime(obj, "%B"),
"day": int(dt.datetime.strftime(obj, "%d")),
"hour": int(dt.datetime.strftime(obj, "%H")),
"min": int(dt.datetime.strftime(obj, "%M")),
},
}

if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
nats_data["schedtaskpayload"]["deleteafter"] = True

r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if r != "ok":
return notify_error(r)

details = {"taskname": task_name, "time": str(obj)}
PendingAction.objects.create(
agent=agent, action_type="schedreboot", details=details
)
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
return Response(
{"time": nice_time, "agent": agent.hostname, "task_name": task_name}
)
@api_view(["POST"])
|
||||
@@ -550,7 +577,7 @@ def install_agent(request):
|
||||
"&&",
|
||||
"timeout",
|
||||
"/t",
|
||||
"20",
|
||||
"10",
|
||||
"/nobreak",
|
||||
">",
|
||||
"NUL",
|
||||
@@ -636,35 +663,60 @@ def install_agent(request):
|
||||
@api_view(["POST"])
|
||||
def recover(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
mode = request.data["mode"]
|
||||
|
||||
if pyver.parse(agent.version) <= pyver.parse("0.9.5"):
|
||||
return notify_error("Only available in agent version greater than 0.9.5")
|
||||
|
||||
if not agent.has_nats:
|
||||
if mode == "tacagent" or mode == "checkrunner" or mode == "rpc":
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
|
||||
if agent.has_nats:
|
||||
if (
|
||||
mode == "tacagent"
|
||||
or mode == "checkrunner"
|
||||
or mode == "salt"
|
||||
or mode == "mesh"
|
||||
):
|
||||
data = {"func": "recover", "payload": {"mode": mode}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
||||
if r == "ok":
|
||||
return Response("Successfully completed recovery")
|
||||
|
||||
if agent.recoveryactions.filter(last_run=None).exists():
|
||||
return notify_error(
|
||||
"A recovery action is currently pending. Please wait for the next agent check-in."
|
||||
)
|
||||
|
||||
if request.data["mode"] == "command" and not request.data["cmd"]:
|
||||
if mode == "command" and not request.data["cmd"]:
|
||||
return notify_error("Command is required")
|
||||
|
||||
# if we've made it this far and realtime recovery didn't work,
|
||||
# tacagent service is the fallback recovery so we obv can't use that to recover itself if it's down
|
||||
if mode == "tacagent":
|
||||
return notify_error(
|
||||
"Requires RPC service to be functional. Please recover that first"
|
||||
)
|
||||
|
||||
# we should only get here if all other methods fail
|
||||
RecoveryAction(
|
||||
agent=agent,
|
||||
mode=request.data["mode"],
|
||||
command=request.data["cmd"] if request.data["mode"] == "command" else None,
|
||||
mode=mode,
|
||||
command=request.data["cmd"] if mode == "command" else None,
|
||||
).save()
|
||||
|
||||
return Response(f"Recovery will be attempted on the agent's next check-in")
|
||||
return Response("Recovery will be attempted on the agent's next check-in")
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def run_script(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
|
||||
output = request.data["output"]
|
||||
args = request.data["args"]
|
||||
|
||||
req_timeout = int(request.data["timeout"]) + 3
|
||||
|
||||
AuditLog.audit_script_run(
|
||||
@@ -673,75 +725,33 @@ def run_script(request):
|
||||
script=script.name,
|
||||
)
|
||||
|
||||
data = {
|
||||
"func": "runscript",
|
||||
"timeout": request.data["timeout"],
|
||||
"script_args": request.data["args"],
|
||||
"payload": {
|
||||
"code": script.code,
|
||||
"shell": script.shell,
|
||||
},
|
||||
}
|
||||
|
||||
if output == "wait":
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=req_timeout,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": args,
|
||||
},
|
||||
)
|
||||
|
||||
if isinstance(r, dict):
|
||||
if r["stdout"]:
|
||||
return Response(r["stdout"])
|
||||
elif r["stderr"]:
|
||||
return Response(r["stderr"])
|
||||
else:
|
||||
try:
|
||||
r["retcode"]
|
||||
except KeyError:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
return Response(f"Return code: {r['retcode']}")
|
||||
|
||||
else:
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
else:
|
||||
return notify_error(str(r))
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
|
||||
return Response(r)
|
||||
else:
|
||||
data = {
|
||||
"agentpk": agent.pk,
|
||||
"scriptpk": script.pk,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": args,
|
||||
}
|
||||
run_script_bg_task.delay(data)
|
||||
asyncio.run(agent.nats_cmd(data, wait=False))
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
|
||||
def restart_mesh(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(func="service.restart", arg="mesh agent", timeout=30)
|
||||
if r == "timeout" or r == "error":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif isinstance(r, bool) and r:
|
||||
return Response(f"Restarted Mesh Agent on {agent.hostname}")
|
||||
else:
|
||||
return notify_error(f"Failed to restart the Mesh Agent on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
|
||||
def recover_mesh(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=60,
|
||||
func="cmd.run",
|
||||
kwargs={
|
||||
"cmd": r'"C:\\Program Files\\TacticalAgent\\tacticalrmm.exe" -m recovermesh',
|
||||
"timeout": 55,
|
||||
},
|
||||
)
|
||||
if r == "timeout" or r == "error":
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
data = {"func": "recover", "payload": {"mode": "mesh"}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=45))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response(f"Repaired mesh agent on {agent.hostname}")
|
||||
@@ -805,73 +815,44 @@ def bulk(request):
|
||||
return notify_error("Must select at least 1 agent")
|
||||
|
||||
if request.data["target"] == "client":
|
||||
agents = Agent.objects.filter(site__client_id=request.data["client"])
|
||||
q = Agent.objects.filter(site__client_id=request.data["client"])
|
||||
elif request.data["target"] == "site":
|
||||
agents = Agent.objects.filter(site_id=request.data["site"])
|
||||
q = Agent.objects.filter(site_id=request.data["site"])
|
||||
elif request.data["target"] == "agents":
|
||||
agents = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
elif request.data["target"] == "all":
|
||||
agents = Agent.objects.all()
|
||||
q = Agent.objects.all()
|
||||
else:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
minions = [agent.salt_id for agent in agents]
|
||||
minions = [agent.salt_id for agent in q]
|
||||
agents = [agent.pk for agent in q]
|
||||
|
||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||
|
||||
if request.data["mode"] == "command":
|
||||
r = Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="cmd.run_bg",
|
||||
kwargs={
|
||||
"cmd": request.data["cmd"],
|
||||
"shell": request.data["shell"],
|
||||
"timeout": request.data["timeout"],
|
||||
},
|
||||
handle_bulk_command_task.delay(
|
||||
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
|
||||
)
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
return Response(f"Command will now be run on {len(minions)} agents")
|
||||
return Response(f"Command will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "script":
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
|
||||
if script.shell == "python":
|
||||
r = Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": request.data["args"],
|
||||
"bg": True,
|
||||
},
|
||||
)
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
else:
|
||||
data = {
|
||||
"minions": minions,
|
||||
"scriptpk": script.pk,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": request.data["args"],
|
||||
}
|
||||
run_bulk_script_task.delay(data)
|
||||
|
||||
return Response(f"{script.name} will now be run on {len(minions)} agents")
|
||||
handle_bulk_script_task.delay(
|
||||
script.pk, agents, request.data["args"], request.data["timeout"]
|
||||
)
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "install":
|
||||
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(minions)} agents"
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["mode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(minions=minions)
|
||||
return Response(f"Patch status scan will now run on {len(minions)} agents")
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
@@ -917,3 +898,15 @@ def agent_maintenance(request):
|
||||
return notify_error("Invalid data")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WMI(APIView):
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if pyver.parse(agent.version) < pyver.parse("1.1.2"):
|
||||
return notify_error("Requires agent version 1.1.2 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
return Response("ok")
|
||||
@@ -1,5 +0,0 @@
from django.apps import AppConfig


class ApiConfig(AppConfig):
name = "api"
@@ -1,11 +0,0 @@
from django.urls import path
from . import views
from apiv3 import views as v3_views

urlpatterns = [
path("triggerpatchscan/", views.trigger_patch_scan),
path("<int:pk>/checkrunner/", views.CheckRunner.as_view()),
path("<int:pk>/taskrunner/", views.TaskRunner.as_view()),
path("<int:pk>/saltinfo/", views.SaltInfo.as_view()),
path("<int:pk>/meshinfo/", v3_views.MeshInfo.as_view()),
]
@@ -1,149 +0,0 @@
|
||||
from loguru import logger
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.decorators import (
|
||||
api_view,
|
||||
authentication_classes,
|
||||
permission_classes,
|
||||
)
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
from winupdate.tasks import check_for_updates_task
|
||||
|
||||
from autotasks.serializers import TaskRunnerGetSerializer, TaskRunnerPatchSerializer
|
||||
from checks.serializers import CheckRunnerGetSerializer, CheckResultsSerializer
|
||||
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
@authentication_classes((TokenAuthentication,))
|
||||
@permission_classes((IsAuthenticated,))
|
||||
def trigger_patch_scan(request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
else:
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
else:
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
"""
|
||||
For windows agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
checks = Check.objects.filter(agent__pk=pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request, pk):
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
|
||||
if check.check_type != "cpuload" and check.check_type != "memory":
|
||||
serializer = CheckResultsSerializer(
|
||||
instance=check, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
|
||||
else:
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
|
||||
check.handle_check(request.data)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class TaskRunner(APIView):
|
||||
"""
|
||||
For the windows python agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
return Response(TaskRunnerGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
serializer = TaskRunnerPatchSerializer(
|
||||
instance=task, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SaltInfo(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
ret = {
|
||||
"latestVer": settings.LATEST_SALT_VER,
|
||||
"currentVer": agent.salt_ver,
|
||||
"salt_id": agent.salt_id,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.salt_ver = request.data["ver"]
|
||||
agent.save(update_fields=["salt_ver"])
|
||||
return Response("ok")
|
||||
@@ -45,15 +45,11 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
def test_get_mesh_info(self):
|
||||
url = f"/api/v3/{self.agent.pk}/meshinfo/"
|
||||
url2 = f"/api/v1/{self.agent.pk}/meshinfo/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
r = self.client.get(url2)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url2)
|
||||
|
||||
def test_get_winupdater(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/winupdater/"
|
||||
|
||||
@@ -14,4 +14,6 @@ urlpatterns = [
|
||||
path("newagent/", views.NewAgent.as_view()),
|
||||
path("winupdater/", views.WinUpdater.as_view()),
|
||||
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
|
||||
path("software/", views.Software.as_view()),
|
||||
path("installer/", views.Installer.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
import asyncio
|
||||
import os
|
||||
import requests
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from django.http import HttpResponse
|
||||
from rest_framework import serializers
|
||||
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
@@ -19,6 +20,7 @@ from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
from accounts.models import User
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from software.models import InstalledSoftware
|
||||
from checks.serializers import CheckRunnerGetSerializerV3
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
@@ -27,13 +29,12 @@ from winupdate.serializers import ApprovedUpdateSerializer
|
||||
from agents.tasks import (
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
get_wmi_detail_task,
|
||||
sync_salt_modules_task,
|
||||
)
|
||||
from winupdate.tasks import check_for_updates_task
|
||||
from software.tasks import get_installed_software, install_chocolatey
|
||||
from software.tasks import install_chocolatey
|
||||
from checks.utils import bytes2human
|
||||
from tacticalrmm.utils import notify_error
|
||||
from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@@ -97,6 +98,17 @@ class Hello(APIView):
|
||||
recovery.save(update_fields=["last_run"])
|
||||
return Response(recovery.send())
|
||||
|
||||
# handle agent update
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
update = agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
update.status = "completed"
|
||||
update.save(update_fields=["status"])
|
||||
return Response(update.details)
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists():
|
||||
agent.handle_pending_actions()
|
||||
@@ -111,8 +123,6 @@ class Hello(APIView):
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
|
||||
sync_salt_modules_task.delay(agent.pk)
|
||||
get_installed_software.delay(agent.pk)
|
||||
get_wmi_detail_task.delay(agent.pk)
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
||||
)
|
||||
@@ -133,8 +143,6 @@ class CheckRunner(APIView):
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
@@ -333,21 +341,16 @@ class WinUpdater(APIView):
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
else:
|
||||
logger.info(
|
||||
f"{agent.hostname} is rebooting after updates were installed."
|
||||
agent.salt_api_async(
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
else:
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
@@ -381,7 +384,15 @@ class MeshInfo(APIView):
|
||||
|
||||
def patch(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.mesh_node_id = request.data["nodeidhex"]
|
||||
|
||||
if "nodeidhex" in request.data:
|
||||
# agent <= 1.1.0
|
||||
nodeid = request.data["nodeidhex"]
|
||||
else:
|
||||
# agent >= 1.1.1
|
||||
nodeid = request.data["nodeid"]
|
||||
|
||||
agent.mesh_node_id = nodeid
|
||||
agent.save(update_fields=["mesh_node_id"])
|
||||
return Response("ok")
|
||||
|
||||
@@ -448,6 +459,8 @@ class NewAgent(APIView):
|
||||
else:
|
||||
WinUpdatePolicy(agent=agent).save()
|
||||
|
||||
reload_nats()
|
||||
|
||||
# Generate policies for new agent
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
@@ -469,3 +482,42 @@ class NewAgent(APIView):
|
||||
"token": token.key,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class Software(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
raw: SoftwareList = request.data["software"]
|
||||
if not isinstance(raw, list):
|
||||
return notify_error("err")
|
||||
|
||||
sw = filter_software(raw)
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.first()
|
||||
s.software = sw
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Installer(APIView):
def get(self, request):
# used to check if token is valid. will return 401 if not
return Response("ok")

def post(self, request):
if "version" not in request.data:
return notify_error("Invalid data")

ver = request.data["version"]
if pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER):
return notify_error(
f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM"
)

return Response("ok")
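One thing worth noting in the installer check above: version strings are compared with packaging.version (imported as pyver throughout this diff), which gives a proper semantic comparison rather than a lexicographic string comparison. A quick self-contained illustration:

from packaging import version as pyver

# semantic comparison: 1.1.10 is newer than 1.1.2
assert pyver.parse("1.1.2") < pyver.parse("1.1.10")

# a plain string comparison would get this wrong
assert not ("1.1.2" < "1.1.10")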
@@ -1051,10 +1051,13 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
for task in tasks:
|
||||
run_win_task.assert_any_call(task.id)
|
||||
|
||||
def test_update_policy_tasks(self):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_update_policy_tasks(self, nats_cmd):
|
||||
from .tasks import update_policy_task_fields_task
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
nats_cmd.return_value = "ok"
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make(
|
||||
|
||||
@@ -0,0 +1,18 @@
# Generated by Django 3.1.3 on 2020-11-29 09:12

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('autotasks', '0008_auto_20201030_1515'),
]

operations = [
migrations.AddField(
model_name='automatedtask',
name='run_time_bit_weekdays',
field=models.IntegerField(blank=True, null=True),
),
]
@@ -0,0 +1,33 @@
from django.db import migrations
from tacticalrmm.utils import get_bit_days

DAYS_OF_WEEK = {
0: "Monday",
1: "Tuesday",
2: "Wednesday",
3: "Thursday",
4: "Friday",
5: "Saturday",
6: "Sunday",
}


def migrate_days(apps, schema_editor):
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
for task in AutomatedTask.objects.exclude(run_time_days__isnull=True).exclude(
run_time_days=[]
):
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
task.run_time_bit_weekdays = get_bit_days(run_days)
task.save(update_fields=["run_time_bit_weekdays"])


class Migration(migrations.Migration):

dependencies = [
("autotasks", "0009_automatedtask_run_time_bit_weekdays"),
]

operations = [
migrations.RunPython(migrate_days),
]
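The data migration above converts the legacy run_time_days lists into the new run_time_bit_weekdays integer via tacticalrmm.utils.get_bit_days, and the model's schedule text now goes the other way through bitdays_to_string. The exact encoding lives in tacticalrmm/utils.py; as a rough sketch of the idea (the helper name and bit values below are assumptions for illustration, not taken from this diff), each weekday maps to one bit and the selected days are OR-ed together:

# hypothetical weekday bitmask; the real mapping is whatever
# tacticalrmm.utils.get_bit_days / bitdays_to_string define
WEEKDAY_BITS = {
    "Sunday": 1,
    "Monday": 2,
    "Tuesday": 4,
    "Wednesday": 8,
    "Thursday": 16,
    "Friday": 32,
    "Saturday": 64,
}


def get_bit_days_sketch(days):
    # OR together one bit per selected weekday name
    mask = 0
    for day in days:
        mask |= WEEKDAY_BITS[day]
    return mask


# all seven days selected -> 127, which matches the
# run_time_bit_weekdays=127 value used in the task tests further down
assert get_bit_days_sketch(list(WEEKDAY_BITS)) == 127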
@@ -8,6 +8,7 @@ from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models.fields import DateTimeField
|
||||
from automation.models import Policy
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
@@ -69,6 +70,8 @@ class AutomatedTask(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
name = models.CharField(max_length=255)
|
||||
run_time_bit_weekdays = models.IntegerField(null=True, blank=True)
|
||||
# run_time_days is deprecated, use bit weekdays
|
||||
run_time_days = ArrayField(
|
||||
models.IntegerField(choices=RUN_TIME_DAY_CHOICES, null=True, blank=True),
|
||||
null=True,
|
||||
@@ -107,21 +110,12 @@ class AutomatedTask(BaseAuditModel):
|
||||
elif self.task_type == "runonce":
|
||||
return f'Run once on {self.run_time_date.strftime("%m/%d/%Y %I:%M%p")}'
|
||||
elif self.task_type == "scheduled":
|
||||
ret = []
|
||||
for i in self.run_time_days:
|
||||
for j in RUN_TIME_DAY_CHOICES:
|
||||
if i in j:
|
||||
ret.append(j[1][0:3])
|
||||
|
||||
run_time_nice = dt.datetime.strptime(
|
||||
self.run_time_minute, "%H:%M"
|
||||
).strftime("%I:%M %p")
|
||||
|
||||
if len(ret) == 7:
|
||||
return f"Every day at {run_time_nice}"
|
||||
else:
|
||||
days = ",".join(ret)
|
||||
return f"{days} at {run_time_nice}"
|
||||
days = bitdays_to_string(self.run_time_bit_weekdays)
|
||||
return f"{days} at {run_time_nice}"
|
||||
|
||||
@property
|
||||
def last_run_as_timezone(self):
|
||||
@@ -169,6 +163,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
name=self.name,
|
||||
run_time_days=self.run_time_days,
|
||||
run_time_minute=self.run_time_minute,
|
||||
run_time_bit_weekdays=self.run_time_bit_weekdays,
|
||||
run_time_date=self.run_time_date,
|
||||
task_type=self.task_type,
|
||||
win_task_name=self.win_task_name,
|
||||
|
||||
@@ -1,52 +1,37 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
from loguru import logger
|
||||
from tacticalrmm.celery import app
|
||||
from django.conf import settings
|
||||
import pytz
|
||||
from django.utils import timezone as djangotime
|
||||
from packaging import version as pyver
|
||||
|
||||
from .models import AutomatedTask
|
||||
from logs.models import PendingAction
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
DAYS_OF_WEEK = {
|
||||
0: "Monday",
|
||||
1: "Tuesday",
|
||||
2: "Wednesday",
|
||||
3: "Thursday",
|
||||
4: "Friday",
|
||||
5: "Saturday",
|
||||
6: "Sunday",
|
||||
}
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk, pending_action=False):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if task.task_type == "scheduled":
|
||||
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task.win_task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {task.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Weekly",
|
||||
f'start_time="{task.run_time_minute}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
kwargs={"days_of_week": run_days},
|
||||
)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": task.run_time_bit_weekdays,
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"hour": dt.datetime.strptime(task.run_time_minute, "%H:%M").hour,
|
||||
"min": dt.datetime.strptime(task.run_time_minute, "%H:%M").minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif task.task_type == "runonce":
|
||||
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(task.agent.timezone)
|
||||
task_time_utc = task.run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
|
||||
@@ -57,45 +42,41 @@ def create_win_task_schedule(pk, pending_action=False):
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
task.save()
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task.win_task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {task.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
f'start_date="{task.run_time_date.strftime("%Y-%m-%d")}"',
|
||||
f'start_time="{task.run_time_date.strftime("%H:%M")}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"start_when_available=True",
|
||||
],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"year": int(dt.datetime.strftime(task.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(task.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(task.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(task.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(task.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if task.remove_if_not_scheduled and pyver.parse(
|
||||
task.agent.version
|
||||
) >= pyver.parse("1.1.2"):
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif task.task_type == "checkfailure" or task.task_type == "manual":
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task.win_task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {task.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
'start_date="1975-01-01"',
|
||||
'start_time="01:00"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
},
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
PendingAction(
|
||||
@@ -129,13 +110,16 @@ def create_win_task_schedule(pk, pending_action=False):
|
||||
def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.edit_task",
|
||||
arg=[f"name={task.win_task_name}", f"enabled={action}"],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": task.win_task_name,
|
||||
"enabled": action,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data))
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
PendingAction(
|
||||
@@ -150,9 +134,6 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.error(
|
||||
f"Unable to update the scheduled task {task.win_task_name} on {task.agent.hostname}. It will be updated when the agent checks in."
|
||||
)
|
||||
return
|
||||
|
||||
# clear pending action since it was successful
|
||||
@@ -163,7 +144,6 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
|
||||
task.sync_status = "synced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
logger.info(f"{task.agent.hostname} task {task.name} was edited.")
|
||||
return "ok"
|
||||
|
||||
|
||||
@@ -171,13 +151,13 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
def delete_win_task_schedule(pk, pending_action=False):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.delete_task",
|
||||
arg=[f"name={task.win_task_name}"],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task.win_task_name},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
PendingAction(
|
||||
@@ -188,9 +168,6 @@ def delete_win_task_schedule(pk, pending_action=False):
|
||||
task.sync_status = "pendingdeletion"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.error(
|
||||
f"Unable to delete scheduled task {task.win_task_name} on {task.agent.hostname}. It was marked pending deletion and will be removed when the agent checks in."
|
||||
)
|
||||
return
|
||||
|
||||
# complete pending action since it was successful
|
||||
@@ -200,14 +177,13 @@ def delete_win_task_schedule(pk, pending_action=False):
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
task.delete()
|
||||
logger.info(f"{task.agent.hostname} task {task.name} was deleted.")
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def run_win_task(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
r = task.agent.salt_api_async(func="task.run", arg=[f"name={task.win_task_name}"])
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
return "ok"
|
||||
|
||||
|
||||
@@ -219,18 +195,9 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")
|
||||
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="task.list_tasks",
|
||||
)
|
||||
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
logger.error(
|
||||
f"Unable to clean up scheduled tasks on {agent.hostname}. Agent might be offline"
|
||||
)
|
||||
return "errtimeout"
|
||||
|
||||
if not isinstance(r, list):
|
||||
if not isinstance(r, list) and not r: # empty list
|
||||
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
|
||||
return "notlist"
|
||||
|
||||
@@ -239,7 +206,8 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
exclude_tasks = (
|
||||
"TacticalRMM_fixmesh",
|
||||
"TacticalRMM_SchedReboot",
|
||||
"TacticalRMM_saltwatchdog", # will be implemented in future
|
||||
"TacticalRMM_sync",
|
||||
"TacticalRMM_agentupdate",
|
||||
)
|
||||
|
||||
for task in r:
|
||||
@@ -249,16 +217,16 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
if task.startswith("TacticalRMM_") and task not in agent_task_names:
|
||||
# delete task since it doesn't exist in UI
|
||||
ret = agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.delete_task",
|
||||
arg=[f"name={task}"],
|
||||
)
|
||||
if isinstance(ret, bool) and ret is True:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
else:
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task},
|
||||
}
|
||||
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if ret != "ok":
|
||||
logger.error(
|
||||
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
|
||||
)
|
||||
else:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
|
||||
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import datetime as dt
|
||||
from unittest.mock import patch, call
|
||||
from model_bakery import baker
|
||||
from django.utils import timezone as djangotime
|
||||
@@ -25,9 +26,9 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
# setup data
|
||||
script = baker.make_recipe("scripts.script")
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_old = baker.make_recipe("agents.agent", version="0.9.0")
|
||||
policy = baker.make("automation.Policy")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
|
||||
# test script set to invalid pk
|
||||
data = {"autotask": {"script": 500}}
|
||||
@@ -50,10 +51,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test invalid agent version
|
||||
# test old agent version
|
||||
data = {
|
||||
"autotask": {"script": script.id, "script_args": ["args"]},
|
||||
"agent": agent_old.id,
|
||||
"autotask": {"script": script.id},
|
||||
"agent": old_agent.id,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
@@ -63,7 +64,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
data = {
|
||||
"autotask": {
|
||||
"name": "Test Task Scheduled with Assigned Check",
|
||||
"run_time_days": [0, 1, 2],
|
||||
"run_time_days": ["Sunday", "Monday", "Friday"],
|
||||
"run_time_minute": "10:00",
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
@@ -84,6 +85,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
data = {
|
||||
"autotask": {
|
||||
"name": "Test Task Manual",
|
||||
"run_time_days": [],
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
"script": script.id,
|
||||
@@ -181,10 +183,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("autotasks.tasks.run_win_task.delay")
|
||||
def test_run_autotask(self, run_win_task):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_autotask(self, nats_cmd):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
|
||||
# test invalid url
|
||||
@@ -195,7 +197,15 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
url = f"/tasks/runwintask/{task.id}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
run_win_task.assert_called_with(task.id)
|
||||
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
|
||||
url = f"/tasks/runwintask/{task2.id}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -205,8 +215,8 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_remove_orphaned_win_task(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_remove_orphaned_win_task(self, nats_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
self.task1 = AutomatedTask.objects.create(
|
||||
agent=self.agent,
|
||||
@@ -214,20 +224,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
)
|
||||
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, "errtimeout")
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, "errtimeout")
|
||||
|
||||
salt_api_cmd.return_value = "task not found in"
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, "notlist")
|
||||
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# test removing an orphaned task
|
||||
win_tasks = [
|
||||
"Adobe Acrobat Update Task",
|
||||
@@ -242,50 +238,54 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
]
|
||||
|
||||
self.calls = [
|
||||
call(timeout=15, func="task.list_tasks"),
|
||||
call({"func": "listschedtasks"}, timeout=10),
|
||||
call(
|
||||
timeout=20,
|
||||
func="task.delete_task",
|
||||
arg=["name=TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"],
|
||||
{
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": "TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
),
|
||||
]
|
||||
|
||||
salt_api_cmd.side_effect = [win_tasks, True]
|
||||
nats_cmd.side_effect = [win_tasks, "ok"]
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(salt_api_cmd.call_count, 2)
|
||||
salt_api_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(nats_cmd.call_count, 2)
|
||||
nats_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test salt delete_task fail
|
||||
salt_api_cmd.reset_mock()
|
||||
salt_api_cmd.side_effect = [win_tasks, False]
|
||||
# test nats delete task fail
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.side_effect = [win_tasks, "error deleting task"]
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
salt_api_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(salt_api_cmd.call_count, 2)
|
||||
nats_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(nats_cmd.call_count, 2)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# no orphaned tasks
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb")
|
||||
salt_api_cmd.side_effect = [win_tasks, True]
|
||||
nats_cmd.side_effect = [win_tasks, "ok"]
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(salt_api_cmd.call_count, 1)
|
||||
self.assertEqual(nats_cmd.call_count, 1)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async")
|
||||
def test_run_win_task(self, salt_api_async):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_win_task(self, nats_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
self.task1 = AutomatedTask.objects.create(
|
||||
agent=self.agent,
|
||||
name="test task 1",
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
)
|
||||
salt_api_async.return_value = "Response 200"
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = run_win_task.s(self.task1.pk).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_create_win_task_schedule(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_create_win_task_schedule(self, nats_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
@@ -295,46 +295,32 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
name="test task 1",
|
||||
win_task_name=task_name,
|
||||
task_type="scheduled",
|
||||
run_time_days=[0, 1, 6],
|
||||
run_time_bit_weekdays=127,
|
||||
run_time_minute="21:55",
|
||||
)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(salt_api_cmd.call_count, 1)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task1.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Weekly",
|
||||
'start_time="21:55"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
kwargs={"days_of_week": ["Monday", "Tuesday", "Sunday"]},
|
||||
self.assertEqual(nats_cmd.call_count, 1)
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": 127,
|
||||
"pk": self.task1.pk,
|
||||
"name": task_name,
|
||||
"hour": 21,
|
||||
"min": 55,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "synced")
|
||||
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
|
||||
salt_api_cmd.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
@@ -345,7 +331,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
agent=self.agent, action_type="taskaction"
|
||||
)
|
||||
self.assertEqual(self.pending_action.status, "pending")
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(
|
||||
pk=self.task1.pk, pending_action=self.pending_action.pk
|
||||
).apply()
|
||||
@@ -354,7 +340,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
self.assertEqual(self.pending_action.status, "completed")
|
||||
|
||||
# test runonce with future date
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
run_time_date = djangotime.now() + djangotime.timedelta(hours=22)
|
||||
self.task2 = AutomatedTask.objects.create(
|
||||
@@ -364,30 +350,29 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="runonce",
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task2.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
f'start_date="{run_time_date.strftime("%Y-%m-%d")}"',
|
||||
f'start_time="{run_time_date.strftime("%H:%M")}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"start_when_available=True",
|
||||
],
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": self.task2.pk,
|
||||
"name": task_name,
|
||||
"year": int(dt.datetime.strftime(self.task2.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(self.task2.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(self.task2.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(self.task2.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test runonce with date in the past
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
run_time_date = djangotime.now() - djangotime.timedelta(days=13)
|
||||
self.task3 = AutomatedTask.objects.create(
|
||||
@@ -397,31 +382,13 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="runonce",
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
|
||||
self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task3.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
f'start_date="{self.task3.run_time_date.strftime("%Y-%m-%d")}"',
|
||||
f'start_time="{self.task3.run_time_date.strftime("%H:%M")}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"start_when_available=True",
|
||||
],
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test checkfailure
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
self.check = baker.make_recipe("checks.diskspace_check", agent=self.agent)
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
self.task4 = AutomatedTask.objects.create(
|
||||
@@ -431,29 +398,24 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="checkfailure",
|
||||
assigned_check=self.check,
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task4.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
'start_date="1975-01-01"',
|
||||
'start_time="01:00"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.task4.pk,
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test manual
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
self.task5 = AutomatedTask.objects.create(
|
||||
agent=self.agent,
|
||||
@@ -461,23 +423,18 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
win_task_name=task_name,
|
||||
task_type="manual",
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task5.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
'start_date="1975-01-01"',
|
||||
'start_time="01:00"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.task5.pk,
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
import pytz
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
@@ -17,10 +18,9 @@ from .serializers import TaskSerializer, AutoTaskSerializer
|
||||
from .tasks import (
|
||||
create_win_task_schedule,
|
||||
delete_win_task_schedule,
|
||||
run_win_task,
|
||||
enable_or_disable_win_task,
|
||||
)
|
||||
from tacticalrmm.utils import notify_error
|
||||
from tacticalrmm.utils import notify_error, get_bit_days
|
||||
|
||||
|
||||
class AddAutoTask(APIView):
|
||||
@@ -38,17 +38,20 @@ class AddAutoTask(APIView):
|
||||
parent = {"policy": policy}
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=data["agent"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
parent = {"agent": agent}
|
||||
added = "0.11.0"
|
||||
if data["autotask"]["script_args"] and agent.not_supported(added):
|
||||
return notify_error(
|
||||
f"Script arguments only available in agent {added} or greater"
|
||||
)
|
||||
|
||||
check = None
|
||||
if data["autotask"]["assigned_check"]:
|
||||
check = get_object_or_404(Check, pk=data["autotask"]["assigned_check"])
|
||||
|
||||
bit_weekdays = None
|
||||
if data["autotask"]["run_time_days"]:
|
||||
bit_weekdays = get_bit_days(data["autotask"]["run_time_days"])
|
||||
|
||||
del data["autotask"]["run_time_days"]
|
||||
serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save(
|
||||
@@ -56,6 +59,7 @@ class AddAutoTask(APIView):
|
||||
script=script,
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
assigned_check=check,
|
||||
run_time_bit_weekdays=bit_weekdays,
|
||||
)
|
||||
|
||||
if not "policy" in data:
|
||||
@@ -114,5 +118,8 @@ class AutoTask(APIView):
|
||||
@api_view()
|
||||
def run_task(request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
run_win_task.delay(task.pk)
|
||||
if not task.agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
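Note: the AddAutoTask change above replaces the raw run_time_days list with a single run_time_bit_weekdays integer built by get_bit_days. A minimal sketch of such a helper, assuming run_time_days carries weekday indices (0 = Monday ... 6 = Sunday) and each day maps to one bit; the real implementation in tacticalrmm/utils.py may use a different bit order.

def get_bit_days(days):
    # Fold a list of weekday indices into one bitmask, e.g. [0, 2, 4] -> 0b10101.
    bit_days = 0
    for day in days:
        bit_days |= 1 << day
    return bit_days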
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import base64
|
||||
import asyncio
|
||||
import string
|
||||
import os
|
||||
import json
|
||||
import pytz
|
||||
import zlib
|
||||
from statistics import mean
|
||||
|
||||
from django.db import models
|
||||
@@ -306,12 +305,16 @@ class Check(BaseAuditModel):
|
||||
self.status = "passing"
|
||||
else:
|
||||
if self.agent and self.restart_if_stopped:
|
||||
r = self.agent.salt_api_cmd(
|
||||
func="service.restart", arg=self.svc_name, timeout=45
|
||||
)
|
||||
if r == "timeout" or r == "error":
|
||||
nats_data = {
|
||||
"func": "winsvcaction",
|
||||
"payload": {"name": self.svc_name, "action": "start"},
|
||||
}
|
||||
r = asyncio.run(self.agent.nats_cmd(nats_data, timeout=32))
|
||||
if r == "timeout" or r == "natsdown":
|
||||
self.status = "failing"
|
||||
elif isinstance(r, bool) and r:
|
||||
elif not r["success"] and r["errormsg"]:
|
||||
self.status = "failing"
|
||||
elif r["success"]:
|
||||
self.status = "passing"
|
||||
self.more_info = f"Status RUNNING"
|
||||
else:
|
||||
@@ -336,8 +339,7 @@ class Check(BaseAuditModel):
|
||||
eventID = self.event_id
|
||||
source = self.event_source
|
||||
message = self.event_message
|
||||
|
||||
r = json.loads(zlib.decompress(base64.b64decode(data["log"])))
|
||||
r = data["log"]
|
||||
|
||||
for i in r:
|
||||
if i["eventType"] == eventType:
|
||||
|
||||
@@ -56,10 +56,3 @@ def handle_check_sms_alert_task(pk):
|
||||
check.save(update_fields=["text_sent"])
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def run_checks_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
agent.salt_api_async(func="win_agent.run_manual_checks")
|
||||
return "ok"
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import asyncio
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from rest_framework.views import APIView
|
||||
@@ -13,7 +15,6 @@ from scripts.models import Script
|
||||
|
||||
from .serializers import CheckSerializer
|
||||
|
||||
from .tasks import run_checks_task
|
||||
|
||||
from automation.tasks import (
|
||||
generate_agent_checks_from_policies_task,
|
||||
@@ -35,17 +36,6 @@ class AddCheck(APIView):
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
parent = {"agent": agent}
|
||||
added = "0.11.0"
|
||||
if (
|
||||
request.data["check"]["check_type"] == "script"
|
||||
and request.data["check"]["script_args"]
|
||||
and agent.not_supported(version_added=added)
|
||||
):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": f"Script arguments only available in agent {added} or greater"
|
||||
}
|
||||
)
|
||||
|
||||
script = None
|
||||
if "script" in request.data["check"]:
|
||||
@@ -57,13 +47,6 @@ class AddCheck(APIView):
|
||||
request.data["check"]["check_type"] == "eventlog"
|
||||
and request.data["check"]["event_id_is_wildcard"]
|
||||
):
|
||||
if agent and agent.not_supported(version_added="0.10.2"):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
|
||||
}
|
||||
)
|
||||
|
||||
request.data["check"]["event_id"] = 0
|
||||
|
||||
serializer = CheckSerializer(
|
||||
@@ -115,31 +98,8 @@ class GetUpdateDeleteCheck(APIView):
|
||||
pass
|
||||
else:
|
||||
if request.data["event_id_is_wildcard"]:
|
||||
if check.agent.not_supported(version_added="0.10.2"):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
|
||||
}
|
||||
)
|
||||
|
||||
request.data["event_id"] = 0
|
||||
|
||||
elif check.check_type == "script":
|
||||
added = "0.11.0"
|
||||
try:
|
||||
request.data["script_args"]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if request.data["script_args"] and check.agent.not_supported(
|
||||
version_added=added
|
||||
):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": f"Script arguments only available in agent {added} or greater"
|
||||
}
|
||||
)
|
||||
|
||||
serializer = CheckSerializer(instance=check, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
@@ -178,7 +138,10 @@ class GetUpdateDeleteCheck(APIView):
|
||||
@api_view()
|
||||
def run_checks(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
run_checks_task.delay(agent.pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
|
||||
return Response(agent.hostname)
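Note: these views now refuse to run when agent.has_nats is false ("Requires agent version 1.1.0 or greater"). An illustrative sketch of that kind of version gate, assuming has_nats and has_gotasks simply compare the stored agent version against 1.1.0 and 1.1.1; the actual properties on the Agent model may be implemented differently.

from packaging import version

class VersionGates:
    version = "1.1.1"  # reported agent version

    @property
    def has_nats(self) -> bool:
        # NATS-backed commands arrived with agent 1.1.0
        return version.parse(self.version) >= version.parse("1.1.0")

    @property
    def has_gotasks(self) -> bool:
        # Go-based scheduled tasks require agent 1.1.1 or newer
        return version.parse(self.version) >= version.parse("1.1.1")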
|
||||
|
||||
|
||||
|
||||
@@ -55,8 +55,7 @@ class Client(BaseAuditModel):
|
||||
return True
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
return True
|
||||
return agent.status == "overdue"
|
||||
|
||||
return False
|
||||
|
||||
@@ -116,8 +115,7 @@ class Site(BaseAuditModel):
|
||||
return True
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
return True
|
||||
return agent.status == "overdue"
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@@ -56,8 +56,8 @@ func downloadAgent(filepath string) (err error) {
|
||||
func main() {
|
||||
|
||||
debugLog := flag.String("log", "", "Verbose output")
|
||||
localSalt := flag.String("local-salt", "", "Use local salt minion")
|
||||
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
|
||||
noSalt := flag.Bool("nosalt", false, "Does not install salt")
|
||||
cert := flag.String("cert", "", "Path to ca.pem")
|
||||
timeout := flag.String("timeout", "", "Timeout for subprocess calls")
|
||||
flag.Parse()
|
||||
@@ -81,8 +81,8 @@ func main() {
|
||||
cmdArgs = append(cmdArgs, "--log", "DEBUG")
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*localSalt)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "--local-salt", *localSalt)
|
||||
if *noSalt {
|
||||
cmdArgs = append(cmdArgs, "-nosalt")
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*localMesh)) != 0 {
|
||||
@@ -133,7 +133,7 @@ func main() {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
time.Sleep(20 * time.Second)
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Println("Installation starting.")
|
||||
cmd := exec.Command(tacrmm, cmdArgs...)
|
||||
|
||||
@@ -36,7 +36,7 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
|
||||
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
|
||||
write-host ('Extracting...')
|
||||
Start-Sleep -s 20
|
||||
Start-Sleep -s 10
|
||||
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
|
||||
exit 0
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
from .helpers import get_auth_token
|
||||
import asyncio
|
||||
import ssl
|
||||
import websockets
|
||||
import json
|
||||
|
||||
@@ -11,15 +10,14 @@ import json
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self):
|
||||
token = get_auth_token(
|
||||
self.mesh_settings.mesh_username, self.mesh_settings.mesh_token
|
||||
)
|
||||
async def websocket_call(self, mesh_settings):
|
||||
token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)
|
||||
|
||||
if settings.MESH_WS_URL:
|
||||
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
|
||||
if settings.DOCKER_BUILD:
|
||||
site = mesh_settings.mesh_site.replace("https", "ws")
|
||||
uri = f"{site}:443/control.ashx?auth={token}"
|
||||
else:
|
||||
site = self.mesh_settings.mesh_site.replace("https", "wss")
|
||||
site = mesh_settings.mesh_site.replace("https", "wss")
|
||||
uri = f"{site}/control.ashx?auth={token}"
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
@@ -45,5 +43,5 @@ class Command(BaseCommand):
|
||||
break
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
self.mesh_settings = CoreSettings.objects.first()
|
||||
asyncio.get_event_loop().run_until_complete(self.websocket_call())
|
||||
mesh_settings = CoreSettings.objects.first()
|
||||
asyncio.get_event_loop().run_until_complete(self.websocket_call(mesh_settings))
|
||||
|
||||
@@ -1,53 +1,90 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
from .helpers import get_auth_token
|
||||
import asyncio
|
||||
import websockets
|
||||
import json
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self):
|
||||
|
||||
token = get_auth_token(
|
||||
self.mesh_settings.mesh_username, self.mesh_settings.mesh_token
|
||||
)
|
||||
|
||||
if settings.MESH_WS_URL:
|
||||
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
|
||||
else:
|
||||
site = self.mesh_settings.mesh_site.replace("https", "wss")
|
||||
uri = f"{site}/control.ashx?auth={token}"
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
|
||||
# Get Device groups to see if it exists
|
||||
await websocket.send(json.dumps({"action": "meshes"}))
|
||||
|
||||
async for message in websocket:
|
||||
response = json.loads(message)
|
||||
if response["action"] == "meshes":
|
||||
|
||||
# If no meshes are present
|
||||
if not response["meshes"]:
|
||||
await websocket.send(
|
||||
json.dumps(
|
||||
{
|
||||
"action": "createmesh",
|
||||
"meshname": "TacticalRMM",
|
||||
"meshtype": 2,
|
||||
"responseid": "python",
|
||||
}
|
||||
)
|
||||
)
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
self.mesh_settings = CoreSettings.objects.first()
|
||||
asyncio.get_event_loop().run_until_complete(self.websocket_call())
|
||||
self.stdout.write("Initial Mesh Central setup complete")
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
from .helpers import get_auth_token
|
||||
import asyncio
|
||||
import websockets
|
||||
import json
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self, mesh_settings):
|
||||
|
||||
token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)
|
||||
|
||||
if settings.DOCKER_BUILD:
|
||||
site = mesh_settings.mesh_site.replace("https", "ws")
|
||||
uri = f"{site}:443/control.ashx?auth={token}"
|
||||
else:
|
||||
site = mesh_settings.mesh_site.replace("https", "wss")
|
||||
uri = f"{site}/control.ashx?auth={token}"
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
|
||||
# Get Device groups to see if it exists
|
||||
await websocket.send(json.dumps({"action": "meshes"}))
|
||||
|
||||
async for message in websocket:
|
||||
response = json.loads(message)
|
||||
if response["action"] == "meshes":
|
||||
|
||||
# If no meshes are present
|
||||
if not response["meshes"]:
|
||||
await websocket.send(
|
||||
json.dumps(
|
||||
{
|
||||
"action": "createmesh",
|
||||
"meshname": "TacticalRMM",
|
||||
"meshtype": 2,
|
||||
"responseid": "python",
|
||||
}
|
||||
)
|
||||
)
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
mesh_settings = CoreSettings.objects.first()
|
||||
|
||||
try:
|
||||
# Check for Mesh Username
|
||||
if (
|
||||
not mesh_settings.mesh_username
|
||||
or settings.MESH_USERNAME != mesh_settings.mesh_username
|
||||
):
|
||||
mesh_settings.mesh_username = settings.MESH_USERNAME
|
||||
|
||||
# Check for Mesh Site
|
||||
if (
|
||||
not mesh_settings.mesh_site
|
||||
or settings.MESH_SITE != mesh_settings.mesh_site
|
||||
):
|
||||
mesh_settings.mesh_site = settings.MESH_SITE
|
||||
|
||||
# Check for Mesh Token
|
||||
if (
|
||||
not mesh_settings.mesh_token
|
||||
or settings.MESH_TOKEN_KEY != mesh_settings.mesh_token
|
||||
):
|
||||
mesh_settings.mesh_token = settings.MESH_TOKEN_KEY
|
||||
|
||||
mesh_settings.save()
|
||||
|
||||
except AttributeError:
|
||||
self.stdout.write(
|
||||
"Mesh Setup was skipped because the configuration wasn't available. Needs to be setup manually."
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
asyncio.get_event_loop().run_until_complete(
|
||||
self.websocket_call(mesh_settings)
|
||||
)
|
||||
self.stdout.write("Initial Mesh Central setup complete")
|
||||
except websockets.exceptions.ConnectionClosedError:
|
||||
self.stdout.write(
|
||||
"Unable to connect to MeshCentral. Please verify it is online and the configuration is correct in the settings."
|
||||
)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from time import sleep
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
@@ -15,18 +13,6 @@ class Command(BaseCommand):
|
||||
help = "Collection of tasks to run after updating the rmm, after migrations"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
|
||||
if not os.path.exists("/usr/local/bin/goversioninfo"):
|
||||
self.stdout.write(self.style.ERROR("*" * 100))
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"ERROR: New update script available. Delete this one and re-download."
|
||||
)
|
||||
)
|
||||
self.stdout.write("\n")
|
||||
sys.exit(1)
|
||||
|
||||
# 10-16-2020 changed the type of the agent's 'disks' model field
|
||||
# from a dict of dicts, to a list of disks in the golang agent
|
||||
# the following will convert dicts to lists for agent's still on the python agent
|
||||
@@ -43,88 +29,17 @@ class Command(BaseCommand):
|
||||
self.style.SUCCESS(f"Migrated disks on {agent.hostname}")
|
||||
)
|
||||
|
||||
# sync modules. split into chunks of 60 agents to not overload the salt master
|
||||
agents = Agent.objects.all()
|
||||
online = [i.salt_id for i in agents if i.status == "online"]
|
||||
|
||||
chunks = (online[i : i + 60] for i in range(0, len(online), 60))
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Syncing agent modules..."))
|
||||
for chunk in chunks:
|
||||
r = Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
|
||||
sleep(5)
|
||||
|
||||
has_old_config = True
|
||||
rmm_conf = "/etc/nginx/sites-available/rmm.conf"
|
||||
if os.path.exists(rmm_conf):
|
||||
with open(rmm_conf) as f:
|
||||
for line in f:
|
||||
if "location" and "builtin" in line:
|
||||
has_old_config = False
|
||||
break
|
||||
|
||||
if has_old_config:
|
||||
new_conf = """
|
||||
location /builtin/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
|
||||
alias /srv/salt/scripts/;
|
||||
}
|
||||
"""
|
||||
|
||||
after_this = """
|
||||
location /saltscripts/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
|
||||
alias /srv/salt/scripts/userdefined/;
|
||||
}
|
||||
"""
|
||||
|
||||
self.stdout.write(self.style.ERROR("*" * 100))
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"WARNING: A recent update requires you to manually edit your nginx config"
|
||||
)
|
||||
)
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR("Please add the following location block to ")
|
||||
+ self.style.WARNING(rmm_conf)
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS(new_conf))
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"You can paste the above right after the following block that's already in your nginx config:"
|
||||
)
|
||||
)
|
||||
self.stdout.write(after_this)
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"Make sure to replace rmm.yourwebsite.com with your domain"
|
||||
)
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.ERROR("After editing, restart nginx with the command ")
|
||||
+ self.style.WARNING("sudo systemctl restart nginx")
|
||||
)
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(self.style.ERROR("*" * 100))
|
||||
input("Press Enter to continue...")
|
||||
|
||||
# install go
|
||||
if not os.path.exists("/usr/local/rmmgo/"):
|
||||
self.stdout.write(self.style.SUCCESS("Installing golang"))
|
||||
subprocess.run("sudo mkdir -p /usr/local/rmmgo", shell=True)
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
r = subprocess.run(
|
||||
f"wget https://golang.org/dl/go1.15.linux-amd64.tar.gz -P {tmpdir}",
|
||||
f"wget https://golang.org/dl/go1.15.5.linux-amd64.tar.gz -P {tmpdir}",
|
||||
shell=True,
|
||||
)
|
||||
|
||||
gotar = os.path.join(tmpdir, "go1.15.linux-amd64.tar.gz")
|
||||
gotar = os.path.join(tmpdir, "go1.15.5.linux-amd64.tar.gz")
|
||||
|
||||
subprocess.run(f"tar -xzf {gotar} -C {tmpdir}", shell=True)
|
||||
|
||||
|
||||
9
api/tacticalrmm/core/management/commands/reload_nats.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from tacticalrmm.utils import reload_nats
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Reload Nats"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
reload_nats()
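Note: Django derives a management command's name from its module filename, so the new file above can be invoked as python manage.py reload_nats, or from code:

from django.core.management import call_command

# regenerate and reload the NATS configuration after agent or settings changes
call_command("reload_nats")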
|
||||
@@ -72,16 +72,14 @@ class CoreSettings(BaseAuditModel):
|
||||
if not self.pk and CoreSettings.objects.exists():
|
||||
raise ValidationError("There can only be one CoreSettings instance")
|
||||
|
||||
# Only runs on first create
|
||||
# for install script
|
||||
if not self.pk:
|
||||
mesh_settings = self.get_initial_mesh_settings()
|
||||
|
||||
if "mesh_token" in mesh_settings:
|
||||
self.mesh_token = mesh_settings["mesh_token"]
|
||||
if "mesh_username" in mesh_settings:
|
||||
self.mesh_username = mesh_settings["mesh_username"]
|
||||
if "mesh_site" in mesh_settings:
|
||||
self.mesh_site = mesh_settings["mesh_site"]
|
||||
try:
|
||||
self.mesh_site = settings.MESH_SITE
|
||||
self.mesh_username = settings.MESH_USERNAME
|
||||
self.mesh_token = settings.MESH_TOKEN_KEY
|
||||
except:
|
||||
pass
|
||||
|
||||
return super(CoreSettings, self).save(*args, **kwargs)
|
||||
|
||||
@@ -121,8 +119,8 @@ class CoreSettings(BaseAuditModel):
|
||||
and self.smtp_port
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
def send_mail(self, subject, body, test=False):
|
||||
|
||||
@@ -168,60 +166,9 @@ class CoreSettings(BaseAuditModel):
|
||||
except Exception as e:
|
||||
logger.error(f"SMS failed to send: {e}")
|
||||
|
||||
def get_initial_mesh_settings(self):
|
||||
|
||||
mesh_settings = {}
|
||||
|
||||
# Check for Mesh Username
|
||||
try:
|
||||
if settings.MESH_USERNAME:
|
||||
mesh_settings["mesh_username"] = settings.MESH_USERNAME
|
||||
else:
|
||||
raise AttributeError("MESH_USERNAME doesn't exist")
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# Check for Mesh Site
|
||||
try:
|
||||
if settings.MESH_SITE:
|
||||
mesh_settings["mesh_site"] = settings.MESH_SITE
|
||||
else:
|
||||
raise AttributeError("MESH_SITE doesn't exist")
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# Check for Mesh Token
|
||||
try:
|
||||
if settings.MESH_TOKEN_KEY:
|
||||
mesh_settings["mesh_token"] = settings.MESH_TOKEN_KEY
|
||||
else:
|
||||
raise AttributeError("MESH_TOKEN_KEY doesn't exist")
|
||||
except AttributeError:
|
||||
filepath = "/token/token.key"
|
||||
counter = 0
|
||||
while counter < 12:
|
||||
try:
|
||||
with open(filepath, "r") as read_file:
|
||||
key = read_file.readlines()
|
||||
|
||||
# Remove key file contents for security reasons
|
||||
with open(filepath, "w") as write_file:
|
||||
write_file.write("")
|
||||
|
||||
# readlines() returns an array. Get first item
|
||||
mesh_settings["mesh_token"] = key[0].rstrip()
|
||||
break
|
||||
except (IOError, IndexError):
|
||||
pass
|
||||
|
||||
counter = counter + 1
|
||||
time.sleep(10)
|
||||
|
||||
return mesh_settings
|
||||
|
||||
@staticmethod
|
||||
def serialize(core):
|
||||
# serializes the core and returns json
|
||||
from .serializers import CoreSerializer
|
||||
|
||||
return CoreSerializer(core).data
|
||||
return CoreSerializer(core).data
|
||||
|
||||
@@ -4,8 +4,6 @@ from loguru import logger
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from tacticalrmm.celery import app
|
||||
from accounts.models import User
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
|
||||
@@ -14,15 +12,6 @@ logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@app.task
|
||||
def core_maintenance_tasks():
|
||||
# cleanup any leftover agent user accounts
|
||||
agents = Agent.objects.values_list("agent_id", flat=True)
|
||||
users = User.objects.exclude(username__in=agents).filter(last_login=None)
|
||||
if users:
|
||||
users.delete()
|
||||
logger.info(
|
||||
"Removed leftover agent user accounts:", str([i.username for i in users])
|
||||
)
|
||||
|
||||
# cleanup expired runonce tasks
|
||||
tasks = AutomatedTask.objects.filter(
|
||||
task_type="runonce",
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from core.tasks import core_maintenance_tasks
|
||||
from unittest.mock import patch
|
||||
from model_bakery import baker, seq
|
||||
|
||||
|
||||
class TestCoreTasks(TacticalTestCase):
|
||||
@@ -31,3 +33,45 @@ class TestCoreTasks(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
|
||||
def test_ui_maintenance_actions(self, remove_orphaned_win_tasks):
|
||||
url = "/core/servermaintenance/"
|
||||
|
||||
agents = baker.make_recipe("agents.online_agent", _quantity=3)
|
||||
|
||||
# test with empty data
|
||||
r = self.client.post(url, {})
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test with invalid action
|
||||
data = {"action": "invalid_action"}
|
||||
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test reload nats action
|
||||
data = {"action": "reload_nats"}
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
# test prune db with no tables
|
||||
data = {"action": "prune_db"}
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test prune db with tables
|
||||
data = {
|
||||
"action": "prune_db",
|
||||
"prune_tables": ["audit_logs", "agent_outages", "pending_actions"],
|
||||
}
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
# test remove orphaned tasks
|
||||
data = {"action": "rm_orphaned_tasks"}
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
remove_orphaned_win_tasks.assert_called()
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@@ -8,4 +8,5 @@ urlpatterns = [
|
||||
path("version/", views.version),
|
||||
path("emailtest/", views.email_test),
|
||||
path("dashinfo/", views.dashboard_info),
|
||||
path("servermaintenance/", views.server_maintenance),
|
||||
]
|
||||
|
||||
@@ -84,3 +84,56 @@ def email_test(request):
|
||||
return notify_error(r)
|
||||
|
||||
return Response("Email Test OK!")
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def server_maintenance(request):
|
||||
from tacticalrmm.utils import reload_nats
|
||||
|
||||
if "action" not in request.data:
|
||||
return notify_error("The data is incorrect")
|
||||
|
||||
if request.data["action"] == "reload_nats":
|
||||
reload_nats()
|
||||
return Response("Nats configuration was reloaded successfully.")
|
||||
|
||||
if request.data["action"] == "rm_orphaned_tasks":
|
||||
from agents.models import Agent
|
||||
from autotasks.tasks import remove_orphaned_win_tasks
|
||||
|
||||
agents = Agent.objects.all()
|
||||
online = [i for i in agents if i.status == "online"]
|
||||
for agent in online:
|
||||
remove_orphaned_win_tasks.delay(agent.pk)
|
||||
|
||||
return Response(
|
||||
"The task has been initiated. Check the Debug Log in the UI for progress."
|
||||
)
|
||||
|
||||
if request.data["action"] == "prune_db":
|
||||
from agents.models import AgentOutage
|
||||
from logs.models import AuditLog, PendingAction
|
||||
|
||||
if "prune_tables" not in request.data:
|
||||
return notify_error("The data is incorrect.")
|
||||
|
||||
tables = request.data["prune_tables"]
|
||||
records_count = 0
|
||||
if "agent_outages" in tables:
|
||||
agentoutages = AgentOutage.objects.exclude(recovery_time=None)
|
||||
records_count += agentoutages.count()
|
||||
agentoutages.delete()
|
||||
|
||||
if "audit_logs" in tables:
|
||||
auditlogs = AuditLog.objects.filter(action="check_run")
|
||||
records_count += auditlogs.count()
|
||||
auditlogs.delete()
|
||||
|
||||
if "pending_actions" in tables:
|
||||
pendingactions = PendingAction.objects.filter(status="completed")
|
||||
records_count += pendingactions.count()
|
||||
pendingactions.delete()
|
||||
|
||||
return Response(f"{records_count} records were pruned from the database")
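Note: the new server_maintenance endpoint is driven entirely by the POST body. Example payloads for the actions handled above (the URL comes from core/urls.py; authentication headers are omitted, and anything else returns 400):

# POST any of these to /core/servermaintenance/
payloads = [
    {"action": "reload_nats"},
    {"action": "rm_orphaned_tasks"},
    {
        "action": "prune_db",
        "prune_tables": ["audit_logs", "agent_outages", "pending_actions"],
    },
]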
|
||||
|
||||
return notify_error("The data is incorrect")
|
||||
|
||||
18
api/tacticalrmm/logs/migrations/0011_auto_20201119_0854.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-19 08:54
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('logs', '0010_auto_20201110_2238'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='pendingaction',
|
||||
name='action_type',
|
||||
field=models.CharField(blank=True, choices=[('schedreboot', 'Scheduled Reboot'), ('taskaction', 'Scheduled Task Action'), ('agentupdate', 'Agent Update')], max_length=255, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,4 @@
|
||||
import datetime as dt
|
||||
import json
|
||||
from abc import abstractmethod
|
||||
from django.db import models
|
||||
from tacticalrmm.middleware import get_username, get_debug_info
|
||||
@@ -7,6 +6,7 @@ from tacticalrmm.middleware import get_username, get_debug_info
|
||||
ACTION_TYPE_CHOICES = [
|
||||
("schedreboot", "Scheduled Reboot"),
|
||||
("taskaction", "Scheduled Task Action"),
|
||||
("agentupdate", "Agent Update"),
|
||||
]
|
||||
|
||||
AUDIT_ACTION_TYPE_CHOICES = [
|
||||
@@ -248,7 +248,7 @@ class PendingAction(models.Model):
|
||||
obj = dt.datetime.strptime(self.details["time"], "%Y-%m-%d %H:%M:%S")
|
||||
return dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
|
||||
elif self.action_type == "taskaction":
|
||||
elif self.action_type == "taskaction" or self.action_type == "agentupdate":
|
||||
return "Next agent check-in"
|
||||
|
||||
@property
|
||||
@@ -256,6 +256,9 @@ class PendingAction(models.Model):
|
||||
if self.action_type == "schedreboot":
|
||||
return "Device pending reboot"
|
||||
|
||||
elif self.action_type == "agentupdate":
|
||||
return f"Agent update to {self.details['version']}"
|
||||
|
||||
elif self.action_type == "taskaction":
|
||||
if self.details["action"] == "taskdelete":
|
||||
return "Device pending task deletion"
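Note: "agentupdate" is a new action_type choice; the properties above give it a due of "Next agent check-in" and a description built from details["version"]. A hedged example of how such a pending action would presumably be created (field names taken from the model and tests in this diff):

from logs.models import PendingAction

# details["version"] feeds the description property shown above
PendingAction.objects.create(
    agent=agent,  # an existing Agent instance
    action_type="agentupdate",
    details={"version": "1.1.1"},
)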
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
from loguru import logger
|
||||
from tacticalrmm.celery import app
|
||||
from django.conf import settings
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def cancel_pending_action_task(data):
|
||||
|
||||
if data["action_type"] == "schedreboot" and data["status"] == "pending":
|
||||
|
||||
from agents.models import Agent
|
||||
|
||||
agent = Agent.objects.get(pk=data["agent"])
|
||||
|
||||
task_name = data["details"]["taskname"]
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=30, func="task.delete_task", arg=[f"name={task_name}"]
|
||||
)
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
logger.error(
|
||||
f"Unable to contact {agent.hostname}. Task {task_name} will need to cancelled manually."
|
||||
)
|
||||
return
|
||||
else:
|
||||
logger.info(f"Scheduled reboot cancelled on {agent.hostname}")
|
||||
|
||||
return "ok"
|
||||
@@ -122,10 +122,25 @@ class TestAuditViews(TacticalTestCase):
|
||||
{"filter": {"clientFilter": [site.client.id]}, "count": 23},
|
||||
]
|
||||
|
||||
pagination = {
|
||||
"rowsPerPage": 25,
|
||||
"page": 1,
|
||||
"sortBy": "entry_time",
|
||||
"descending": True,
|
||||
}
|
||||
|
||||
for req in data:
|
||||
resp = self.client.patch(url, req["filter"], format="json")
|
||||
resp = self.client.patch(
|
||||
url, {**req["filter"], "pagination": pagination}, format="json"
|
||||
)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), req["count"])
|
||||
self.assertEqual(
|
||||
len(resp.data["audit_logs"]),
|
||||
pagination["rowsPerPage"]
|
||||
if req["count"] > pagination["rowsPerPage"]
|
||||
else req["count"],
|
||||
)
|
||||
self.assertEqual(resp.data["total"], req["count"])
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@@ -190,54 +205,31 @@ class TestAuditViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("logs.tasks.cancel_pending_action_task.delay")
|
||||
def test_cancel_pending_action(self, mock_task):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_cancel_pending_action(self, nats_cmd):
|
||||
url = "/logs/cancelpendingaction/"
|
||||
pending_action = baker.make("logs.PendingAction")
|
||||
# TODO fix this TypeError: Object of type coroutine is not JSON serializable
|
||||
""" agent = baker.make("agents.Agent", version="1.1.1")
|
||||
pending_action = baker.make(
|
||||
"logs.PendingAction",
|
||||
agent=agent,
|
||||
details={
|
||||
"time": "2021-01-13 18:20:00",
|
||||
"taskname": "TacticalRMM_SchedReboot_wYzCCDVXlc",
|
||||
},
|
||||
)
|
||||
|
||||
serializer = PendingActionSerializer(pending_action).data
|
||||
data = {"pk": pending_action.id}
|
||||
resp = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
mock_task.assert_called_with(serializer)
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": "TacticalRMM_SchedReboot_wYzCCDVXlc"},
|
||||
}
|
||||
nats_cmd.assert_called_with(nats_data, timeout=10)
|
||||
|
||||
# try request again and it should fail since pending action doesn't exist
|
||||
resp = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
self.assertEqual(resp.status_code, 404) """
|
||||
|
||||
self.check_not_authenticated("delete", url)
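Note on the TODO above: when the async Agent.nats_cmd is mocked with a regular MagicMock, the value that reaches the response can end up being a coroutine or mock object the JSON renderer cannot serialize. One commonly used fix is unittest.mock.AsyncMock (Python 3.8+), which lets asyncio.run() in the view resolve to the mocked return value:

from unittest.mock import patch, AsyncMock

with patch("agents.models.Agent.nats_cmd", new_callable=AsyncMock) as nats_cmd:
    nats_cmd.return_value = "ok"
    # resp = self.client.delete(url, data, format="json") now serializes normally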
|
||||
|
||||
|
||||
class TestLogsTasks(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_cancel_pending_action_task(self, mock_salt_cmd):
|
||||
from .tasks import cancel_pending_action_task
|
||||
|
||||
pending_action = baker.make(
|
||||
"logs.PendingAction",
|
||||
action_type="schedreboot",
|
||||
status="pending",
|
||||
details={"taskname": "test_name"},
|
||||
)
|
||||
|
||||
# data that is passed to the task
|
||||
data = PendingActionSerializer(pending_action).data
|
||||
|
||||
# set return value on mock to success
|
||||
mock_salt_cmd.return_value = "success"
|
||||
# call task with valid data and see if salt is called with correct data
|
||||
ret = cancel_pending_action_task(data)
|
||||
mock_salt_cmd.assert_called_with(
|
||||
timeout=30, func="task.delete_task", arg=["name=test_name"]
|
||||
)
|
||||
# this should return successful
|
||||
self.assertEquals(ret, "ok")
|
||||
|
||||
# this run should return false
|
||||
mock_salt_cmd.reset_mock()
|
||||
mock_salt_cmd.return_value = "timeout"
|
||||
ret = cancel_pending_action_task(data)
|
||||
self.assertEquals(ret, None)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
import subprocess
|
||||
|
||||
from django.conf import settings
|
||||
@@ -5,6 +6,7 @@ from django.shortcuts import get_object_or_404
|
||||
from django.http import HttpResponse
|
||||
from django.utils import timezone as djangotime
|
||||
from django.db.models import Q
|
||||
from django.core.paginator import Paginator
|
||||
from datetime import datetime as dt
|
||||
|
||||
from rest_framework.response import Response
|
||||
@@ -18,7 +20,7 @@ from accounts.models import User
|
||||
from .serializers import PendingActionSerializer, AuditLogSerializer
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from accounts.serializers import UserSerializer
|
||||
from .tasks import cancel_pending_action_task
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
|
||||
class GetAuditLogs(APIView):
|
||||
@@ -26,6 +28,14 @@ class GetAuditLogs(APIView):
|
||||
from clients.models import Client
|
||||
from agents.models import Agent
|
||||
|
||||
pagination = request.data["pagination"]
|
||||
|
||||
order_by = (
|
||||
f"-{pagination['sortBy']}"
|
||||
if pagination["descending"]
|
||||
else f"{pagination['sortBy']}"
|
||||
)
|
||||
|
||||
agentFilter = Q()
|
||||
clientFilter = Q()
|
||||
actionFilter = Q()
|
||||
@@ -67,9 +77,18 @@ class GetAuditLogs(APIView):
|
||||
.filter(actionFilter)
|
||||
.filter(objectFilter)
|
||||
.filter(timeFilter)
|
||||
)
|
||||
).order_by(order_by)
|
||||
|
||||
return Response(AuditLogSerializer(audit_logs, many=True).data)
|
||||
paginator = Paginator(audit_logs, pagination["rowsPerPage"])
|
||||
|
||||
return Response(
|
||||
{
|
||||
"audit_logs": AuditLogSerializer(
|
||||
paginator.get_page(pagination["page"]), many=True
|
||||
).data,
|
||||
"total": paginator.count,
|
||||
}
|
||||
)
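Note: with the Paginator change above, GetAuditLogs now expects a pagination object in the PATCH body and wraps its reply. The request and response shapes, as exercised by the test earlier in this diff (the agent id here is made up):

request_body = {
    "agentFilter": [42],  # optional; clientFilter works the same way
    "pagination": {
        "rowsPerPage": 25,
        "page": 1,
        "sortBy": "entry_time",
        "descending": True,
    },
}
# response body: {"audit_logs": [...serialized rows...], "total": <count of matching rows>}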
|
||||
|
||||
|
||||
class FilterOptionsAuditLog(APIView):
|
||||
@@ -95,19 +114,26 @@ def agent_pending_actions(request, pk):
|
||||
|
||||
@api_view()
|
||||
def all_pending_actions(request):
|
||||
actions = PendingAction.objects.all()
|
||||
actions = PendingAction.objects.all().select_related("agent")
|
||||
return Response(PendingActionSerializer(actions, many=True).data)
|
||||
|
||||
|
||||
@api_view(["DELETE"])
|
||||
def cancel_pending_action(request):
|
||||
action = get_object_or_404(PendingAction, pk=request.data["pk"])
|
||||
data = PendingActionSerializer(action).data
|
||||
cancel_pending_action_task.delay(data)
|
||||
if not action.agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": action.details["taskname"]},
|
||||
}
|
||||
r = asyncio.run(action.agent.nats_cmd(nats_data, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error(r)
|
||||
|
||||
action.delete()
|
||||
return Response(
|
||||
f"{action.agent.hostname}: {action.description} will be cancelled shortly"
|
||||
)
|
||||
return Response(f"{action.agent.hostname}: {action.description} was cancelled")
|
||||
|
||||
|
||||
@api_view()
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
amqp==2.6.1
|
||||
asgiref==3.3.0
|
||||
asgiref==3.3.1
|
||||
asyncio-nats-client==0.11.4
|
||||
billiard==3.6.3.0
|
||||
celery==4.4.6
|
||||
certifi==2020.11.8
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.3
|
||||
chardet==3.0.4
|
||||
cryptography==3.2.1
|
||||
decorator==4.4.2
|
||||
Django==3.1.3
|
||||
Django==3.1.4
|
||||
django-cors-headers==3.5.0
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.2
|
||||
@@ -25,11 +26,11 @@ pyparsing==2.4.7
|
||||
pytz==2020.4
|
||||
qrcode==6.1
|
||||
redis==3.5.3
|
||||
requests==2.24.0
|
||||
requests==2.25.0
|
||||
six==1.15.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.47.0
|
||||
urllib3==1.25.11
|
||||
twilio==6.49.0
|
||||
urllib3==1.26.2
|
||||
uWSGI==2.0.19.1
|
||||
validators==0.18.1
|
||||
vine==1.3.0
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from django.db import models
|
||||
from logs.models import BaseAuditModel
|
||||
from django.conf import settings
|
||||
|
||||
SCRIPT_SHELLS = [
|
||||
("powershell", "Powershell"),
|
||||
@@ -38,9 +39,9 @@ class Script(BaseAuditModel):
|
||||
@property
|
||||
def file(self):
|
||||
if self.script_type == "userdefined":
|
||||
return f"/srv/salt/scripts/userdefined/{self.filename}"
|
||||
return f"{settings.SCRIPTS_DIR}/userdefined/{self.filename}"
|
||||
else:
|
||||
return f"/srv/salt/scripts/{self.filename}"
|
||||
return f"{settings.SCRIPTS_DIR}/{self.filename}"
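Note: the hard-coded /srv/salt/scripts prefix now comes from settings.SCRIPTS_DIR so the Docker build can relocate it. The setting itself is not part of this diff; presumably it defaults to the old path, roughly:

# settings.py (sketch; the value is assumed from the old hard-coded path)
SCRIPTS_DIR = "/srv/salt/scripts"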
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
@@ -64,11 +65,11 @@ class Script(BaseAuditModel):
|
||||
# files will be copied by the update script or in docker to /srv/salt/scripts
|
||||
|
||||
# for install script
|
||||
try:
|
||||
if not settings.DOCKER_BUILD:
|
||||
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
|
||||
# for docker
|
||||
except:
|
||||
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[0], "scripts")
|
||||
else:
|
||||
scripts_dir = settings.SCRIPTS_DIR
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.serializers import ModelSerializer, ValidationError, ReadOnlyField
|
||||
from .models import Script
|
||||
|
||||
@@ -27,7 +28,7 @@ class ScriptSerializer(ModelSerializer):
|
||||
# but only if adding, not if editing since will overwrite if edit
|
||||
if not self.instance:
|
||||
script_path = os.path.join(
|
||||
"/srv/salt/scripts/userdefined", val["filename"]
|
||||
f"{settings.SCRIPTS_DIR}/userdefined", val["filename"]
|
||||
)
|
||||
if os.path.exists(script_path):
|
||||
raise ValidationError(
|
||||
|
||||
@@ -1,38 +1,73 @@
|
||||
import asyncio
|
||||
|
||||
from tacticalrmm.celery import app
|
||||
from agents.models import Agent
|
||||
from .models import Script
|
||||
from scripts.models import Script
|
||||
|
||||
|
||||
@app.task
|
||||
def run_script_bg_task(data):
|
||||
agent = Agent.objects.get(pk=data["agentpk"])
|
||||
script = Script.objects.get(pk=data["scriptpk"])
|
||||
def handle_bulk_command_task(agentpks, cmd, shell, timeout):
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
|
||||
agent.salt_api_async(
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": data["timeout"],
|
||||
"args": data["args"],
|
||||
},
|
||||
)
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
agents_salt = [agent for agent in agents if not agent.has_nats]
|
||||
minions = [agent.salt_id for agent in agents_salt]
|
||||
|
||||
if minions:
|
||||
Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="cmd.run_bg",
|
||||
kwargs={
|
||||
"cmd": cmd,
|
||||
"shell": shell,
|
||||
"timeout": timeout,
|
||||
},
|
||||
)
|
||||
|
||||
if agents_nats:
|
||||
nats_data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"command": cmd,
|
||||
"shell": shell,
|
||||
},
|
||||
}
|
||||
for agent in agents_nats:
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def run_bulk_script_task(data):
|
||||
# for powershell and batch scripts only, workaround for salt bg script bug
|
||||
script = Script.objects.get(pk=data["scriptpk"])
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout):
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
|
||||
Agent.salt_batch_async(
|
||||
minions=data["minions"],
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
agents_salt = [agent for agent in agents if not agent.has_nats]
|
||||
minions = [agent.salt_id for agent in agents_salt]
|
||||
|
||||
if minions:
|
||||
Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": timeout,
|
||||
"args": args,
|
||||
"bg": True if script.shell == "python" else False, # salt bg script bug
|
||||
},
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "runscript",
|
||||
"timeout": timeout,
|
||||
"script_args": args,
|
||||
"payload": {
|
||||
"code": script.code,
|
||||
"shell": script.shell,
|
||||
"timeout": data["timeout"],
|
||||
"args": data["args"],
|
||||
},
|
||||
)
|
||||
}
|
||||
for agent in agents_nats:
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
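Note: run_script_bg_task and run_bulk_script_task give way to the two Celery tasks above, which split targets into NATS-capable and salt-only agents. A hedged example of queueing them (the module path, primary keys, command and timeout values are assumptions):

from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task

# run a shell command on agents 1-3 in the background
handle_bulk_command_task.delay([1, 2, 3], "ipconfig /flushdns", "cmd", 30)

# run the Script with pk=7, passing script arguments, on the same agents
handle_bulk_script_task.delay(7, [1, 2, 3], ["-verbose"], 60)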
|
||||
|
||||
@@ -94,7 +94,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
def test_load_community_scripts(self):
|
||||
valid_shells = ["powershell", "python", "cmd"]
|
||||
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
|
||||
|
||||
if not settings.DOCKER_BUILD:
|
||||
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
|
||||
else:
|
||||
scripts_dir = settings.SCRIPTS_DIR
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
|
||||
@@ -9,21 +9,6 @@ class TestServiceViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
|
||||
def test_get_services(self):
|
||||
|
||||
# test a call where agent doesn't exist
|
||||
resp = self.client.get("/services/500/services/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
agent = baker.make_recipe("agents.agent_with_services")
|
||||
url = f"/services/{agent.pk}/services/"
|
||||
serializer = ServicesSerializer(agent)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_default_services(self):
|
||||
url = "/services/defaultservices/"
|
||||
resp = self.client.get(url, format="json")
|
||||
@@ -32,16 +17,16 @@ class TestServiceViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_get_refreshed_services(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_services(self, nats_cmd):
|
||||
# test a call where agent doesn't exist
|
||||
resp = self.client.get("/services/500/refreshedservices/", format="json")
|
||||
resp = self.client.get("/services/500/services/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
agent = baker.make_recipe("agents.agent_with_services")
|
||||
url = f"/services/{agent.pk}/refreshedservices/"
|
||||
url = f"/services/{agent.pk}/services/"
|
||||
|
||||
salt_return = [
|
||||
nats_return = [
|
||||
{
|
||||
"pid": 880,
|
||||
"name": "AeLookupSvc",
|
||||
@@ -65,30 +50,23 @@ class TestServiceViews(TacticalTestCase):
|
||||
]
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
nats_cmd.return_value = "timeout"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(timeout=15, func="win_agent.get_services")
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "error"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(timeout=15, func="win_agent.get_services")
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.assert_called_with(data={"func": "winservices"}, timeout=10)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test successful attempt
|
||||
salt_api_cmd.return_value = salt_return
|
||||
nats_cmd.return_value = nats_return
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(timeout=15, func="win_agent.get_services")
|
||||
self.assertEquals(Agent.objects.get(pk=agent.pk).services, salt_return)
|
||||
nats_cmd.assert_called_with(data={"func": "winservices"}, timeout=10)
|
||||
self.assertEquals(Agent.objects.get(pk=agent.pk).services, nats_return)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_service_action(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_service_action(self, nats_cmd):
|
||||
url = "/services/serviceaction/"
|
||||
|
||||
invalid_data = {"pk": 500, "sv_name": "AeLookupSvc", "sv_action": "restart"}
|
||||
@@ -101,47 +79,37 @@ class TestServiceViews(TacticalTestCase):
|
||||
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "sv_action": "restart"}
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
nats_cmd.return_value = "timeout"
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=45,
|
||||
func=f"service.restart",
|
||||
arg="AeLookupSvc",
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "winsvcaction",
|
||||
"payload": {
|
||||
"name": "AeLookupSvc",
|
||||
"action": "stop",
|
||||
},
|
||||
},
|
||||
timeout=32,
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=45,
|
||||
func=f"service.restart",
|
||||
arg="AeLookupSvc",
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test successful attempt
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = {"success": True, "errormsg": ""}
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=45,
|
||||
func=f"service.restart",
|
||||
arg="AeLookupSvc",
|
||||
)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_service_detail(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_service_detail(self, nats_cmd):
|
||||
# test a call where agent doesn't exist
|
||||
resp = self.client.get(
|
||||
"/services/500/doesntexist/servicedetail/", format="json"
|
||||
)
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
salt_return = {
|
||||
nats_return = {
|
||||
"pid": 812,
|
||||
"name": "ALG",
|
||||
"status": "stopped",
|
||||
@@ -156,29 +124,27 @@ class TestServiceViews(TacticalTestCase):
|
||||
url = f"/services/{agent.pk}/alg/servicedetail/"
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
nats_cmd.return_value = "timeout"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(timeout=20, func="service.info", arg="alg")
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(timeout=20, func="service.info", arg="alg")
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.assert_called_with(
|
||||
{"func": "winsvcdetail", "payload": {"name": "alg"}}, timeout=10
|
||||
)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test successful attempt
|
||||
salt_api_cmd.return_value = salt_return
|
||||
nats_cmd.return_value = nats_return
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(timeout=20, func="service.info", arg="alg")
|
||||
self.assertEquals(resp.data, salt_return)
|
||||
nats_cmd.assert_called_with(
|
||||
{"func": "winsvcdetail", "payload": {"name": "alg"}}, timeout=10
|
||||
)
|
||||
self.assertEquals(resp.data, nats_return)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_edit_service(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_edit_service(self, nats_cmd):
|
||||
url = "/services/editservice/"
|
||||
agent = baker.make_recipe("agents.agent_with_services")
|
||||
|
||||
@@ -189,64 +155,43 @@ class TestServiceViews(TacticalTestCase):
|
||||
|
||||
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "autodelay"}
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
# test timeout
|
||||
nats_cmd.return_value = "timeout"
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="service.modify",
|
||||
arg=data["sv_name"],
|
||||
kwargs={"start_type": "auto", "start_delayed": True},
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="service.modify",
|
||||
arg=data["sv_name"],
|
||||
kwargs={"start_type": "auto", "start_delayed": True},
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test successful attempt autodelay
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = {"success": True, "errormsg": ""}
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="service.modify",
|
||||
arg=data["sv_name"],
|
||||
kwargs={"start_type": "auto", "start_delayed": True},
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "editwinsvc",
|
||||
"payload": {
|
||||
"name": "AeLookupSvc",
|
||||
"startType": "autodelay",
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test successful attempt with auto
|
||||
# test error message from agent
|
||||
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "auto"}
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = {
|
||||
"success": False,
|
||||
"errormsg": "The parameter is incorrect",
|
||||
}
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="service.modify",
|
||||
arg=data["sv_name"],
|
||||
kwargs={"start_type": "auto", "start_delayed": False},
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
# test successful attempt with manual
|
||||
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "manual"}
|
||||
salt_api_cmd.return_value = True
|
||||
# test catch all
|
||||
data = {"pk": agent.pk, "sv_name": "AeLookupSvc", "edit_action": "auto"}
|
||||
nats_cmd.return_value = {"success": False, "errormsg": ""}
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="service.modify",
|
||||
arg=data["sv_name"],
|
||||
kwargs={"start_type": "manual"},
|
||||
)
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
self.assertEqual(resp.data, "Something went wrong")
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@@ -4,7 +4,6 @@ from . import views
|
||||
urlpatterns = [
|
||||
path("<int:pk>/services/", views.get_services),
|
||||
path("defaultservices/", views.default_services),
|
||||
path("<int:pk>/refreshedservices/", views.get_refreshed_services),
|
||||
path("serviceaction/", views.service_action),
|
||||
path("<int:pk>/<svcname>/servicedetail/", views.service_detail),
|
||||
path("editservice/", views.edit_service),
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
from loguru import logger
|
||||
|
||||
from rest_framework.response import Response
|
||||
@@ -19,6 +20,15 @@ logger.configure(**settings.LOG_CONFIG)
|
||||
@api_view()
|
||||
def get_services(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
r = asyncio.run(agent.nats_cmd(data={"func": "winservices"}, timeout=10))
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
agent.services = r
|
||||
agent.save(update_fields=["services"])
|
||||
return Response(ServicesSerializer(agent).data)
|
||||
|
||||
|
||||
@@ -27,81 +37,81 @@ def default_services(request):
    return Response(Check.load_default_services())


@api_view()
def get_refreshed_services(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    r = agent.salt_api_cmd(timeout=15, func="win_agent.get_services")

    if r == "timeout":
        return notify_error("Unable to contact the agent")
    elif r == "error" or not r:
        return notify_error("Something went wrong")

    agent.services = r
    agent.save(update_fields=["services"])
    return Response(ServicesSerializer(agent).data)


@api_view(["POST"])
def service_action(request):
    data = request.data
    pk = data["pk"]
    service_name = data["sv_name"]
    service_action = data["sv_action"]
    agent = get_object_or_404(Agent, pk=pk)
    r = agent.salt_api_cmd(
        timeout=45,
        func=f"service.{service_action}",
        arg=service_name,
    )
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    action = request.data["sv_action"]
    data = {
        "func": "winsvcaction",
        "payload": {
            "name": request.data["sv_name"],
        },
    }
    # response struct from agent: {success: bool, errormsg: string}
    if action == "restart":
        data["payload"]["action"] = "stop"
        r = asyncio.run(agent.nats_cmd(data, timeout=32))
        if r == "timeout":
            return notify_error("Unable to contact the agent")
        elif not r["success"] and r["errormsg"]:
            return notify_error(r["errormsg"])
        elif r["success"]:
            data["payload"]["action"] = "start"
            r = asyncio.run(agent.nats_cmd(data, timeout=32))
            if r == "timeout":
                return notify_error("Unable to contact the agent")
            elif not r["success"] and r["errormsg"]:
                return notify_error(r["errormsg"])
            elif r["success"]:
                return Response("ok")
    else:
        data["payload"]["action"] = action
        r = asyncio.run(agent.nats_cmd(data, timeout=32))
        if r == "timeout":
            return notify_error("Unable to contact the agent")
        elif not r["success"] and r["errormsg"]:
            return notify_error(r["errormsg"])
        elif r["success"]:
            return Response("ok")

    if r == "timeout":
        return notify_error("Unable to contact the agent")
    elif r == "error" or not r:
        return notify_error("Something went wrong")

    return Response("ok")
    return notify_error("Something went wrong")

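The restart branch above is implemented as a stop followed by a start, with the same {success, errormsg} reply checked after each step. The helper below is an illustrative condensation of that flow, not a function in the codebase.

def run_winsvc_restart(agent, name: str):
    # payload shape taken from service_action() above
    data = {"func": "winsvcaction", "payload": {"name": name}}
    for step in ("stop", "start"):  # "restart" == stop, then start
        data["payload"]["action"] = step
        r = asyncio.run(agent.nats_cmd(data, timeout=32))
        # agent reply: {"success": bool, "errormsg": str}
        if r == "timeout" or not r["success"]:
            return r
    return {"success": True, "errormsg": ""}
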
@api_view()
def service_detail(request, pk, svcname):
    agent = get_object_or_404(Agent, pk=pk)
    r = agent.salt_api_cmd(timeout=20, func="service.info", arg=svcname)

    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    data = {"func": "winsvcdetail", "payload": {"name": svcname}}
    r = asyncio.run(agent.nats_cmd(data, timeout=10))
    if r == "timeout":
        return notify_error("Unable to contact the agent")
    elif r == "error" or not r:
        return notify_error("Something went wrong")

    return Response(r)


@api_view(["POST"])
def edit_service(request):
    data = request.data
    pk = data["pk"]
    service_name = data["sv_name"]
    edit_action = data["edit_action"]

    agent = get_object_or_404(Agent, pk=pk)

    if edit_action == "autodelay":
        kwargs = {"start_type": "auto", "start_delayed": True}
    elif edit_action == "auto":
        kwargs = {"start_type": "auto", "start_delayed": False}
    else:
        kwargs = {"start_type": edit_action}

    r = agent.salt_api_cmd(
        timeout=20,
        func="service.modify",
        arg=service_name,
        kwargs=kwargs,
    )
    agent = get_object_or_404(Agent, pk=request.data["pk"])
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")
    data = {
        "func": "editwinsvc",
        "payload": {
            "name": request.data["sv_name"],
            "startType": request.data["edit_action"],
        },
    }

    r = asyncio.run(agent.nats_cmd(data, timeout=10))
    # response struct from agent: {success: bool, errormsg: string}
    if r == "timeout":
        return notify_error("Unable to contact the agent")
    elif r == "error" or not r:
        return notify_error("Something went wrong")
    elif not r["success"] and r["errormsg"]:
        return notify_error(r["errormsg"])
    elif r["success"]:
        return Response("ok")

    return Response("ok")
    return notify_error("Something went wrong")

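For reference, the rewritten edit_service expects the same request body the tests at the top of this section post, and now forwards edit_action straight through to the agent as startType instead of translating it into salt service.modify kwargs. An example body (values taken from those tests; the URL prefix is assumed):

payload = {
    "pk": agent.pk,              # agent primary key
    "sv_name": "AeLookupSvc",    # windows service name
    "edit_action": "autodelay",  # also seen in the tests: "auto", "manual"
}
resp = self.client.post("/services/editservice/", payload, format="json")
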
@@ -1,4 +1,4 @@
|
||||
import string
|
||||
import asyncio
|
||||
from time import sleep
|
||||
from loguru import logger
|
||||
from tacticalrmm.celery import app
|
||||
@@ -7,6 +7,7 @@ from django.utils import timezone as djangotime
|
||||
|
||||
from agents.models import Agent
|
||||
from .models import ChocoSoftware, ChocoLog, InstalledSoftware
|
||||
from tacticalrmm.utils import filter_software
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@@ -86,43 +87,6 @@ def update_chocos():
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def get_installed_software(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=30,
|
||||
func="pkg.list_pkgs",
|
||||
kwargs={"include_components": False, "include_updates": False},
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
logger.error(f"Timed out trying to get installed software on {agent.salt_id}")
|
||||
return
|
||||
|
||||
printable = set(string.printable)
|
||||
|
||||
try:
|
||||
software = [
|
||||
{
|
||||
"name": "".join(filter(lambda x: x in printable, k)),
|
||||
"version": "".join(filter(lambda x: x in printable, v)),
|
||||
}
|
||||
for k, v in r.items()
|
||||
]
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to get installed software on {agent.salt_id}: {e}")
|
||||
return
|
||||
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=software).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.get()
|
||||
s.software = software
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def install_program(pk, name, version):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
@@ -167,6 +131,4 @@ def install_program(pk, name, version):
|
||||
agent=agent, name=name, version=version, message=output, installed=installed
|
||||
).save()
|
||||
|
||||
get_installed_software.delay(agent.pk)
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -62,72 +62,6 @@ class TestSoftwareViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_chocos_refresh(self, salt_api_cmd):
|
||||
|
||||
salt_return = {"git": "2.3.4", "docker": "1.0.2"}
|
||||
|
||||
# test a call where agent doesn't exist
|
||||
resp = self.client.get("/software/refresh/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
url = f"/software/refresh/{agent.pk}/"
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="pkg.list_pkgs",
|
||||
kwargs={"include_components": False, "include_updates": False},
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="pkg.list_pkgs",
|
||||
kwargs={"include_components": False, "include_updates": False},
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# test success and created new software object
|
||||
salt_api_cmd.return_value = salt_return
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="pkg.list_pkgs",
|
||||
kwargs={"include_components": False, "include_updates": False},
|
||||
)
|
||||
self.assertTrue(InstalledSoftware.objects.filter(agent=agent).exists())
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# test success and updates software object
|
||||
salt_api_cmd.return_value = salt_return
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="pkg.list_pkgs",
|
||||
kwargs={"include_components": False, "include_updates": False},
|
||||
)
|
||||
software = agent.installedsoftware_set.get()
|
||||
|
||||
expected = [
|
||||
{"name": "git", "version": "2.3.4"},
|
||||
{"name": "docker", "version": "1.0.2"},
|
||||
]
|
||||
|
||||
self.assertTrue(InstalledSoftware.objects.filter(agent=agent).exists())
|
||||
self.assertEquals(software.software, expected)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
|
||||
class TestSoftwareTasks(TacticalTestCase):
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@@ -187,46 +121,7 @@ class TestSoftwareTasks(TacticalTestCase):
|
||||
self.assertEquals(salt_api_cmd.call_count, 2)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_get_installed_software(self, salt_api_cmd):
|
||||
from .tasks import get_installed_software
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
|
||||
salt_return = {"git": "2.3.4", "docker": "1.0.2"}
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = get_installed_software(agent.pk)
|
||||
self.assertFalse(ret)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=30,
|
||||
func="pkg.list_pkgs",
|
||||
kwargs={"include_components": False, "include_updates": False},
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# test successful attempt
|
||||
salt_api_cmd.return_value = salt_return
|
||||
ret = get_installed_software(agent.pk)
|
||||
self.assertTrue(ret)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=30,
|
||||
func="pkg.list_pkgs",
|
||||
kwargs={"include_components": False, "include_updates": False},
|
||||
)
|
||||
software = agent.installedsoftware_set.get()
|
||||
|
||||
expected = [
|
||||
{"name": "git", "version": "2.3.4"},
|
||||
{"name": "docker", "version": "1.0.2"},
|
||||
]
|
||||
|
||||
self.assertTrue(InstalledSoftware.objects.filter(agent=agent).exists())
|
||||
self.assertEquals(software.software, expected)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("software.tasks.get_installed_software.delay")
|
||||
def test_install_program(self, get_installed_software, salt_api_cmd):
|
||||
def test_install_program(self, salt_api_cmd):
|
||||
from .tasks import install_program
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
@@ -247,6 +142,5 @@ class TestSoftwareTasks(TacticalTestCase):
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=900, func="chocolatey.install", arg=["git", "version=2.3.4"]
|
||||
)
|
||||
get_installed_software.assert_called_with(agent.pk)
|
||||
|
||||
self.assertTrue(ChocoLog.objects.filter(agent=agent, name="git").exists())
|
||||
|
||||
@@ -1,4 +1,5 @@
import string
import asyncio
from typing import Any

from django.shortcuts import get_object_or_404

@@ -9,7 +10,7 @@ from agents.models import Agent
from .models import ChocoSoftware, InstalledSoftware
from .serializers import InstalledSoftwareSerializer
from .tasks import install_program
from tacticalrmm.utils import notify_error
from tacticalrmm.utils import notify_error, filter_software


@api_view()
@@ -41,35 +42,20 @@ def get_installed(request, pk):
@api_view()
def refresh_installed(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    r = agent.salt_api_cmd(
        timeout=20,
        func="pkg.list_pkgs",
        kwargs={"include_components": False, "include_updates": False},
    )
    if not agent.has_nats:
        return notify_error("Requires agent version 1.1.0 or greater")

    if r == "timeout":
    r: Any = asyncio.run(agent.nats_cmd({"func": "softwarelist"}, timeout=15))
    if r == "timeout" or r == "natsdown":
        return notify_error("Unable to contact the agent")
    elif r == "error":
        return notify_error("Something went wrong")

    printable = set(string.printable)

    try:
        software = [
            {
                "name": "".join(filter(lambda x: x in printable, k)),
                "version": "".join(filter(lambda x: x in printable, v)),
            }
            for k, v in r.items()
        ]
    except Exception:
        return notify_error("Something went wrong")
    sw = filter_software(r)

    if not InstalledSoftware.objects.filter(agent=agent).exists():
        InstalledSoftware(agent=agent, software=software).save()
        InstalledSoftware(agent=agent, software=sw).save()
    else:
        s = agent.installedsoftware_set.get()
        s.software = software
        s = agent.installedsoftware_set.first()
        s.software = sw
        s.save(update_fields=["software"])

    return Response("ok")

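A rough sketch of how the rewritten refresh_installed could be covered, following the mock style of TestSoftwareViews above. The URL, the version override, and the exact softwarelist reply are assumptions; the reply keys match what filter_software() (added in tacticalrmm/utils.py later in this diff) reads.

    @patch("agents.models.Agent.nats_cmd")
    def test_refresh_installed_nats(self, nats_cmd):
        agent = baker.make_recipe("agents.agent", version="1.1.0")  # assumed has_nats
        url = f"/software/refresh/{agent.pk}/"  # assumed to be the same route as before

        nats_cmd.return_value = [
            {
                "name": "git",
                "version": "2.3.4",
                "publisher": "The Git Development Community",
                "install_date": "2020-11-01",
                "size": "90 MB",
                "source": "msi",
                "location": "",
                "uninstall": "",
            }
        ]
        resp = self.client.get(url, format="json")
        self.assertEqual(resp.status_code, 200)
        nats_cmd.assert_called_with({"func": "softwarelist"}, timeout=15)
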
@@ -27,27 +27,23 @@ app.conf.beat_schedule = {
    },
    "auto-approve-win-updates": {
        "task": "winupdate.tasks.auto_approve_updates_task",
        "schedule": crontab(minute=0, hour="*/8"),
        "schedule": crontab(minute=2, hour="*/8"),
    },
    "install-scheduled-win-updates": {
        "task": "winupdate.tasks.check_agent_update_schedule_task",
        "schedule": crontab(minute=0, hour="*"),
        "schedule": crontab(minute=5, hour="*"),
    },
    "sync-modules": {
        "task": "agents.tasks.batch_sync_modules_task",
        "schedule": crontab(minute=40, hour="*/4"),
    },
    "sys-info": {
        "task": "agents.tasks.batch_sysinfo_task",
        "schedule": crontab(minute=15, hour="*/2"),
    },
    "update-salt": {
        "task": "agents.tasks.update_salt_minion_task",
        "schedule": crontab(minute=30, hour="*/6"),
        "schedule": crontab(minute=25, hour="*/4"),
    },
    "agent-auto-update": {
        "task": "agents.tasks.auto_self_agent_update_task",
        "schedule": crontab(minute=50, hour="*/3"),
        "schedule": crontab(minute=35, hour="*"),
    },
    "agents-sync": {
        "task": "agents.tasks.sync_sysinfo_task",
        "schedule": crontab(minute=55, hour="*"),
    },
}

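The schedule changes above stagger the beat entries onto distinct minutes (2, 5, 25, 35, 55) so the heavier tasks no longer all fire at the top of the hour. Any new entry should follow the same pattern; the task below is purely hypothetical.

from celery.schedules import crontab

app.conf.beat_schedule["prune-audit-logs"] = {      # hypothetical entry
    "task": "logs.tasks.prune_audit_logs_task",     # hypothetical task path
    "schedule": crontab(minute=45, hour="*/12"),    # its own minute, off the shared ones
}
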
@@ -37,7 +37,6 @@ if not DEBUG:
        )
    })


SALT_USERNAME = "changeme"
SALT_PASSWORD = "changeme"
MESH_USERNAME = "changeme"

@@ -16,16 +16,15 @@ def get_debug_info():
EXCLUDE_PATHS = (
    "/api/v3",
    "/api/v2",
    "/api/v1",
    "/logs/auditlogs",
    "/winupdate/winupdater",
    "/winupdate/results",
    f"/{settings.ADMIN_URL}",
    "/logout",
    "/agents/installagent",
    "/logs/downloadlog",
)

ENDS_WITH = "/services/"


class AuditMiddleware:
    def __init__(self, get_response):
@@ -37,7 +36,9 @@ class AuditMiddleware:
        return response

    def process_view(self, request, view_func, view_args, view_kwargs):
        if not request.path.startswith(EXCLUDE_PATHS):
        if not request.path.startswith(EXCLUDE_PATHS) and not request.path.endswith(
            ENDS_WITH
        ):
            # https://stackoverflow.com/questions/26240832/django-and-middleware-which-uses-request-user-is-always-anonymous
            try:
                # DRF saves the class of the view function as the .cls property

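The added endswith check keeps the per-agent /services/ polling URL out of the audit log on top of the existing prefix exclusions. A small self-contained sketch of the condition, using a subset of the paths above:

EXCLUDE_PATHS = ("/api/v3", "/logs/auditlogs", "/winupdate/winupdater")
ENDS_WITH = "/services/"

def should_audit(path: str) -> bool:
    # audit only user-facing API calls; skip agent endpoints and services polling
    return not path.startswith(EXCLUDE_PATHS) and not path.endswith(ENDS_WITH)

assert should_audit("/agents/1/meshcentral/") is True
assert should_audit("/services/5/services/") is False   # caught by the new endswith check
assert should_audit("/api/v3/checkrunner/") is False     # caught by the prefix check
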
@@ -1,8 +1,13 @@
import os
from pathlib import Path
from datetime import timedelta

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SCRIPTS_DIR = "/srv/salt/scripts"

DOCKER_BUILD = False

LOG_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/log")

EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
@@ -10,23 +15,25 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
AUTH_USER_MODEL = "accounts.User"

# latest release
TRMM_VERSION = "0.1.7"
TRMM_VERSION = "0.2.9"

# bump this version everytime vue code is changed
# to alert user they need to manually refresh their browser
APP_VER = "0.0.90"
APP_VER = "0.0.98"

# https://github.com/wh1te909/salt
LATEST_SALT_VER = "1.1.0"

# https://github.com/wh1te909/rmmagent
LATEST_AGENT_VER = "1.0.2"
LATEST_AGENT_VER = "1.1.4"

MESH_VER = "0.6.62"
MESH_VER = "0.7.14"

SALT_MASTER_VER = "3002.2"

# for the update script, bump when need to recreate venv or npm install
PIP_VER = "2"
NPM_VER = "1"
PIP_VER = "4"
NPM_VER = "3"

DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
@@ -51,7 +58,6 @@ INSTALLED_APPS = [
|
||||
"knox",
|
||||
"corsheaders",
|
||||
"accounts",
|
||||
"api",
|
||||
"apiv2",
|
||||
"apiv3",
|
||||
"clients",
|
||||
@@ -149,38 +155,6 @@ LOG_CONFIG = {
|
||||
"handlers": [{"sink": os.path.join(LOG_DIR, "debug.log"), "serialize": False}]
|
||||
}
|
||||
|
||||
if "TRAVIS" in os.environ:
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": "travisci",
|
||||
"USER": "travisci",
|
||||
"PASSWORD": "travisSuperSekret6645",
|
||||
"HOST": "127.0.0.1",
|
||||
"PORT": "",
|
||||
}
|
||||
}
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
"DATETIME_FORMAT": "%b-%d-%Y - %H:%M",
|
||||
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": ("knox.auth.TokenAuthentication",),
|
||||
"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
|
||||
}
|
||||
|
||||
DEBUG = True
|
||||
SECRET_KEY = "abcdefghijklmnoptravis123456789"
|
||||
|
||||
ADMIN_URL = "abc123456/"
|
||||
|
||||
SALT_USERNAME = "travis"
|
||||
SALT_PASSWORD = "travis"
|
||||
MESH_USERNAME = "travis"
|
||||
MESH_SITE = "https://example.com"
|
||||
MESH_TOKEN_KEY = "bd65e957a1e70c622d32523f61508400d6cd0937001a7ac12042227eba0b9ed625233851a316d4f489f02994145f74537a331415d00047dbbf13d940f556806dffe7a8ce1de216dc49edbad0c1a7399c"
|
||||
REDIS_HOST = "localhost"
|
||||
SALT_HOST = "127.0.0.1"
|
||||
|
||||
if "AZPIPELINE" in os.environ:
|
||||
DATABASES = {
|
||||
"default": {
|
||||
@@ -200,11 +174,14 @@ if "AZPIPELINE" in os.environ:
|
||||
"DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
|
||||
}
|
||||
|
||||
ALLOWED_HOSTS = ["api.example.com"]
|
||||
DOCKER_BUILD = True
|
||||
DEBUG = True
|
||||
SECRET_KEY = "abcdefghijklmnoptravis123456789"
|
||||
|
||||
ADMIN_URL = "abc123456/"
|
||||
|
||||
SCRIPTS_DIR = os.path.join(Path(BASE_DIR).parents[1], "scripts")
|
||||
SALT_USERNAME = "pipeline"
|
||||
SALT_PASSWORD = "pipeline"
|
||||
MESH_USERNAME = "pipeline"
|
||||
|
||||
@@ -10,7 +10,6 @@ urlpatterns = [
|
||||
path("login/", LoginView.as_view()),
|
||||
path("logout/", knox_views.LogoutView.as_view()),
|
||||
path("logoutall/", knox_views.LogoutAllView.as_view()),
|
||||
path("api/v1/", include("api.urls")),
|
||||
path("api/v2/", include("apiv2.urls")),
|
||||
path("api/v3/", include("apiv3.urls")),
|
||||
path("clients/", include("clients.urls")),
|
||||
|
||||
@@ -1,4 +1,124 @@
import json
import os
import string
import subprocess
import time
from typing import List, Dict
from loguru import logger

from django.conf import settings
from rest_framework import status
from rest_framework.response import Response

from agents.models import Agent

logger.configure(**settings.LOG_CONFIG)

notify_error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)

SoftwareList = List[Dict[str, str]]

WEEK_DAYS = {
    "Sunday": 0x1,
    "Monday": 0x2,
    "Tuesday": 0x4,
    "Wednesday": 0x8,
    "Thursday": 0x10,
    "Friday": 0x20,
    "Saturday": 0x40,
}


def get_bit_days(days: List[str]) -> int:
    bit_days = 0
    for day in days:
        bit_days |= WEEK_DAYS.get(day)
    return bit_days


def bitdays_to_string(day: int) -> str:
    ret = []
    if day == 127:
        return "Every day"

    if day & WEEK_DAYS["Sunday"]:
        ret.append("Sunday")
    if day & WEEK_DAYS["Monday"]:
        ret.append("Monday")
    if day & WEEK_DAYS["Tuesday"]:
        ret.append("Tuesday")
    if day & WEEK_DAYS["Wednesday"]:
        ret.append("Wednesday")
    if day & WEEK_DAYS["Thursday"]:
        ret.append("Thursday")
    if day & WEEK_DAYS["Friday"]:
        ret.append("Friday")
    if day & WEEK_DAYS["Saturday"]:
        ret.append("Saturday")

    return ", ".join(ret)

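WEEK_DAYS assigns one bit per weekday, so a run schedule packs into a single integer and 127 (all seven bits) means every day. A quick round trip through the two helpers above:

assert get_bit_days(["Monday", "Friday"]) == 0x2 | 0x20   # == 34
assert bitdays_to_string(34) == "Monday, Friday"
assert get_bit_days(list(WEEK_DAYS)) == 127               # all days selected
assert bitdays_to_string(127) == "Every day"
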
def filter_software(sw: SoftwareList) -> SoftwareList:
    ret: SoftwareList = []
    printable = set(string.printable)
    for s in sw:
        ret.append(
            {
                "name": "".join(filter(lambda x: x in printable, s["name"])),
                "version": "".join(filter(lambda x: x in printable, s["version"])),
                "publisher": "".join(filter(lambda x: x in printable, s["publisher"])),
                "install_date": s["install_date"],
                "size": s["size"],
                "source": s["source"],
                "location": s["location"],
                "uninstall": s["uninstall"],
            }
        )

    return ret

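filter_software strips non-printable characters from the name/version/publisher fields the agent reports and passes the remaining fields through untouched. A small example with a made-up record:

raw = [
    {
        "name": "7-Zip 19.00 (x64)\u0000",   # stray NUL as sometimes seen in registry data
        "version": "19.00",
        "publisher": "Igor Pavlov",
        "install_date": "2020-11-01",
        "size": "5 MB",
        "source": "msi",
        "location": "C:\\Program Files\\7-Zip\\",
        "uninstall": "MsiExec.exe /X{...}",
    }
]
clean = filter_software(raw)
assert clean[0]["name"] == "7-Zip 19.00 (x64)"   # non-printable character removed
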
def reload_nats():
    users = [{"user": "tacticalrmm", "password": settings.SECRET_KEY}]
    agents = Agent.objects.prefetch_related("user").only("pk", "agent_id")
    for agent in agents:
        try:
            users.append(
                {"user": agent.agent_id, "password": agent.user.auth_token.key}
            )
        except:
            logger.critical(
                f"{agent.hostname} does not have a user account, NATS will not work"
            )

    domain = settings.ALLOWED_HOSTS[0].split(".", 1)[1]
    if hasattr(settings, "CERT_FILE") and hasattr(settings, "KEY_FILE"):
        if os.path.exists(settings.CERT_FILE) and os.path.exists(settings.KEY_FILE):
            cert_file = settings.CERT_FILE
            key_file = settings.KEY_FILE
        else:
            cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
            key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"
    else:
        cert_file = f"/etc/letsencrypt/live/{domain}/fullchain.pem"
        key_file = f"/etc/letsencrypt/live/{domain}/privkey.pem"

    config = {
        "tls": {
            "cert_file": cert_file,
            "key_file": key_file,
        },
        "authorization": {"users": users},
        "max_payload": 2048576005,
    }

    conf = os.path.join(settings.BASE_DIR, "nats-rmm.conf")
    with open(conf, "w") as f:
        json.dump(config, f)

    if not settings.DOCKER_BUILD:
        time.sleep(0.5)
        subprocess.run(
            ["/usr/local/bin/nats-server", "-signal", "reload"], capture_output=True
        )

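reload_nats writes one NATS account per agent (agent_id / knox token) plus the tacticalrmm account into nats-rmm.conf and then signals the server to reload. The resulting file is plain JSON shaped like this (all values are placeholders):

example_nats_conf = {
    "tls": {
        "cert_file": "/etc/letsencrypt/live/example.com/fullchain.pem",
        "key_file": "/etc/letsencrypt/live/example.com/privkey.pem",
    },
    "authorization": {
        "users": [
            {"user": "tacticalrmm", "password": "<SECRET_KEY>"},
            {"user": "<agent_id>", "password": "<agent knox token>"},
        ]
    },
    "max_payload": 2048576005,
}
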
@@ -107,7 +107,7 @@ def check_agent_update_schedule_task():
|
||||
def check_for_updates_task(pk, wait=False, auto_approve=False):
|
||||
|
||||
if wait:
|
||||
sleep(70)
|
||||
sleep(120)
|
||||
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
ret = agent.salt_api_cmd(
|
||||
|
||||
@@ -175,7 +175,7 @@ class WinupdateTasks(TacticalTestCase):
|
||||
agent_salt_cmd.assert_called_with(func="win_agent.install_updates")
|
||||
self.assertEquals(agent_salt_cmd.call_count, 2)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async")
|
||||
""" @patch("agents.models.Agent.salt_api_async")
|
||||
def test_check_agent_update_monthly_schedule(self, agent_salt_cmd):
|
||||
from .tasks import check_agent_update_schedule_task
|
||||
|
||||
@@ -204,7 +204,7 @@ class WinupdateTasks(TacticalTestCase):
|
||||
|
||||
check_agent_update_schedule_task()
|
||||
agent_salt_cmd.assert_called_with(func="win_agent.install_updates")
|
||||
self.assertEquals(agent_salt_cmd.call_count, 2)
|
||||
self.assertEquals(agent_salt_cmd.call_count, 2) """
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_check_for_updates(self, salt_api_cmd):
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
from apiv3 import views as v3_views
|
||||
|
||||
urlpatterns = [
|
||||
path("<int:pk>/getwinupdates/", views.get_win_updates),
|
||||
path("<int:pk>/runupdatescan/", views.run_update_scan),
|
||||
path("editpolicy/", views.edit_policy),
|
||||
path("winupdater/", views.win_updater),
|
||||
path("results/", v3_views.WinUpdater.as_view()),
|
||||
path("<int:pk>/installnow/", views.install_updates),
|
||||
]
|
||||
|
||||
@@ -58,20 +58,3 @@ def edit_policy(request):
|
||||
patch.action = request.data["policy"]
|
||||
patch.save(update_fields=["action"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@api_view()
|
||||
@authentication_classes((TokenAuthentication,))
|
||||
@permission_classes((IsAuthenticated,))
|
||||
def win_updater(request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.delete_superseded_updates()
|
||||
patches = (
|
||||
WinUpdate.objects.filter(agent=agent)
|
||||
.exclude(installed=True)
|
||||
.filter(action="approve")
|
||||
)
|
||||
if patches:
|
||||
return Response(ApprovedUpdateSerializer(patches, many=True).data)
|
||||
|
||||
return Response("nopatches")
|
||||
@@ -27,15 +27,21 @@ jobs:
|
||||
source env/bin/activate
|
||||
cd /myagent/_work/1/s/api/tacticalrmm
|
||||
pip install --no-cache-dir --upgrade pip
|
||||
pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
|
||||
pip install --no-cache-dir setuptools==50.3.2 wheel==0.36.1
|
||||
pip install --no-cache-dir -r requirements.txt -r requirements-test.txt
|
||||
displayName: "Install Python Dependencies"
|
||||
|
||||
- script: |
|
||||
cd /myagent/_work/1/s/api
|
||||
git config user.email "admin@example.com"
|
||||
git config user.name "Bob"
|
||||
git fetch
|
||||
git checkout develop
|
||||
git pull
|
||||
source env/bin/activate
|
||||
cd /myagent/_work/1/s/api/tacticalrmm
|
||||
python manage.py test -v 2
|
||||
coverage run manage.py test -v 2
|
||||
coveralls
|
||||
displayName: "Run django tests"
|
||||
|
||||
- script: |
|
||||
|
||||
27  backup.sh  (Normal file → Executable file)
@@ -1,7 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="2"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh'
|
||||
SCRIPT_VERSION="4"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'
|
||||
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
@@ -31,11 +31,25 @@ POSTGRES_PW="hunter2"
|
||||
|
||||
#####################################################
|
||||
|
||||
if [[ "$POSTGRES_USER" == "changeme" || "$POSTGRES_PW" == "hunter2" ]]; then
|
||||
printf >&2 "${RED}You must change the postgres username/password at the top of this file.${NC}\n"
|
||||
printf >&2 "${RED}Check the github readme for where to find them.${NC}\n"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d /rmmbackups ]; then
|
||||
sudo mkdir /rmmbackups
|
||||
sudo chown ${USER}:${USER} /rmmbackups
|
||||
fi
|
||||
|
||||
if [ -d /meshcentral/meshcentral-backup ]; then
|
||||
rm -f /meshcentral/meshcentral-backup/*
|
||||
fi
|
||||
|
||||
if [ -d /meshcentral/meshcentral-coredumps ]; then
|
||||
rm -f /meshcentral/meshcentral-coredumps/*
|
||||
fi
|
||||
|
||||
dt_now=$(date '+%Y_%m_%d__%H_%M_%S')
|
||||
tmp_dir=$(mktemp -d -t tacticalrmm-XXXXXXXXXXXXXXXXXXXXX)
|
||||
sysd="/etc/systemd/system"
|
||||
@@ -58,18 +72,13 @@ mongodump --gzip --out=${tmp_dir}/meshcentral/mongo
|
||||
sudo tar -czvf ${tmp_dir}/salt/etc-salt.tar.gz -C /etc/salt .
|
||||
tar -czvf ${tmp_dir}/salt/srv-salt.tar.gz -C /srv/salt .
|
||||
|
||||
if [ -d "/certs" ]; then
|
||||
sudo tar -czvf ${tmp_dir}/certs/certs.tar.gz -C /certs .
|
||||
else
|
||||
sudo tar -czvf ${tmp_dir}/certs/etc-letsencrypt.tar.gz -C /etc/letsencrypt .
|
||||
fi
|
||||
|
||||
sudo tar -czvf ${tmp_dir}/certs/etc-letsencrypt.tar.gz -C /etc/letsencrypt .
|
||||
|
||||
sudo tar -czvf ${tmp_dir}/nginx/etc-nginx.tar.gz -C /etc/nginx .
|
||||
|
||||
sudo tar -czvf ${tmp_dir}/confd/etc-confd.tar.gz -C /etc/conf.d .
|
||||
|
||||
sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/celery-winupdate.service ${sysd}/meshcentral.service ${tmp_dir}/systemd/
|
||||
sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/celery-winupdate.service ${sysd}/meshcentral.service ${sysd}/nats.service ${tmp_dir}/systemd/
|
||||
|
||||
cat /rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
|
||||
cp /rmm/api/tacticalrmm/tacticalrmm/local_settings.py /rmm/api/tacticalrmm/app.ini ${tmp_dir}/rmm/
|
||||
|
||||
@@ -1,24 +1,21 @@
|
||||
MESH_HOST=mesh.example.com
|
||||
MESH_USER=mesh
|
||||
MESH_PASS=meshpass
|
||||
EMAIL_USER=admin@example.com
|
||||
IMAGE_REPO=tacticalrmm/
|
||||
VERSION=latest
|
||||
|
||||
# tactical credentials (Used to login to dashboard)
|
||||
TRMM_USER=tactical
|
||||
TRMM_PASS=tactical
|
||||
|
||||
# dns settings
|
||||
APP_HOST=app.example.com
|
||||
API_HOST=api.example.com
|
||||
MESH_HOST=mesh.example.com
|
||||
|
||||
# mesh settings
|
||||
MESH_USER=meshcentral
|
||||
MESH_PASS=meshcentralpass
|
||||
MONGODB_USER=mongouser
|
||||
MONGODB_PASSWORD=mongopass
|
||||
|
||||
# database settings
|
||||
POSTGRES_USER=postgres
|
||||
POSTGRES_PASS=pass
|
||||
POSTGRES_HOST=db
|
||||
|
||||
APP_HOST=app.example.com
|
||||
API_HOST=api.example.com
|
||||
|
||||
REDIS_HOST=redis
|
||||
|
||||
SALT_HOST=salt
|
||||
SALT_USER=saltapi
|
||||
SALT_PASS=password
|
||||
|
||||
ADMIN_URL=admin
|
||||
DJANGO_SEKRET=secret12341234123412341234
|
||||
DJANGO_DEBUG=False
|
||||
POSTGRES_PASS=postgrespass
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
user nginx;
|
||||
worker_processes 1;
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
access_log /var/log/nginx/access.log main;
|
||||
sendfile on;
|
||||
keepalive_timeout 65;
|
||||
|
||||
server_tokens off;
|
||||
|
||||
upstream tacticalrmm {
|
||||
server unix:///app/tacticalrmm.sock;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
#server_name ${API_HOST};
|
||||
client_max_body_size 300M;
|
||||
access_log /var/log/nginx/api-access.log;
|
||||
error_log /var/log/nginx/api-error.log;
|
||||
|
||||
location /static/ {
|
||||
root /app;
|
||||
}
|
||||
|
||||
location /private/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
|
||||
alias /app/tacticalrmm/private/;
|
||||
}
|
||||
|
||||
location /saltscripts/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
|
||||
alias /srv/salt/scripts/userdefined/;
|
||||
}
|
||||
|
||||
location /builtin/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
|
||||
alias /srv/salt/scripts/;
|
||||
}
|
||||
|
||||
location / {
|
||||
uwsgi_pass tacticalrmm;
|
||||
include /etc/nginx/uwsgi_params;
|
||||
uwsgi_read_timeout 9999s;
|
||||
uwsgi_ignore_client_abort on;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
daemon off;
|
||||
@@ -1,46 +0,0 @@
|
||||
FROM tiangolo/uwsgi-nginx:python3.8
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
ARG DJANGO_SEKRET
|
||||
ARG DJANGO_DEBUG
|
||||
ARG POSTGRES_USER
|
||||
ARG POSTGRES_PASS
|
||||
ARG POSTGRES_HOST
|
||||
ARG SALT_HOST
|
||||
ARG SALT_USER
|
||||
ARG SALT_PASS
|
||||
ARG REDIS_HOST
|
||||
ARG MESH_USER
|
||||
ARG MESH_HOST
|
||||
ARG MESH_TOKEN_KEY
|
||||
ARG APP_HOST
|
||||
ARG API_HOST
|
||||
ARG ADMIN_URL
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
RUN apt-get update && apt-get install -y gettext-base wget ca-certificates
|
||||
COPY ./api/tacticalrmm/requirements.txt .
|
||||
RUN pip install --upgrade pip
|
||||
RUN pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
RUN wget https://golang.org/dl/go1.15.linux-amd64.tar.gz -P /tmp
|
||||
COPY ./api/tacticalrmm/ .
|
||||
COPY ./scripts/ /scripts
|
||||
COPY ./docker/api/prestart.sh .
|
||||
COPY ./docker/api/uwsgi.ini .
|
||||
COPY ./docker/api/api.conf /app/api.conf.tmp
|
||||
COPY ./api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
|
||||
RUN envsubst '\$APP_HOST, \$API_HOST' < /app/api.conf.tmp > /app/nginx.conf && \
|
||||
rm /app/api.conf.tmp
|
||||
COPY ./docker/api/local_settings.py.keep ./tacticalrmm/local_settings.py.tmp
|
||||
RUN envsubst < /app/tacticalrmm/local_settings.py.tmp > /app/tacticalrmm/local_settings.py && rm /app/tacticalrmm/local_settings.py.tmp
|
||||
|
||||
RUN tar -xzf /tmp/go1.15.linux-amd64.tar.gz -C /tmp && \
|
||||
mkdir /usr/local/rmmgo && \
|
||||
mv /tmp/go /usr/local/rmmgo/ && \
|
||||
rm -rf /tmp/go
|
||||
|
||||
RUN /usr/local/rmmgo/go/bin/go get github.com/josephspurrier/goversioninfo/cmd/goversioninfo && \
|
||||
chmod +x /usr/local/bin/goversioninfo
|
||||
@@ -1,47 +0,0 @@
|
||||
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||
|
||||
ALLOWED_HOSTS = ['${API_HOST}']
|
||||
|
||||
ADMIN_URL = "${ADMIN_URL}"
|
||||
|
||||
CORS_ORIGIN_WHITELIST = ["https://${APP_HOST}",]
|
||||
|
||||
DEBUG = ${DJANGO_DEBUG}
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': 'tacticalrmm',
|
||||
'USER': '${POSTGRES_USER}',
|
||||
'PASSWORD': '${POSTGRES_PASS}',
|
||||
'HOST': '${POSTGRES_HOST}',
|
||||
'PORT': '5432',
|
||||
}
|
||||
}
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
'DATETIME_FORMAT': "%b-%d-%Y - %H:%M",
|
||||
|
||||
'DEFAULT_PERMISSION_CLASSES': (
|
||||
'rest_framework.permissions.IsAuthenticated',
|
||||
),
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||
'knox.auth.TokenAuthentication',
|
||||
),
|
||||
}
|
||||
|
||||
if not DEBUG:
|
||||
REST_FRAMEWORK.update({
|
||||
'DEFAULT_RENDERER_CLASSES': (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
)
|
||||
})
|
||||
|
||||
SALT_USERNAME = "${SALT_USER}"
|
||||
SALT_PASSWORD = "${SALT_PASS}"
|
||||
MESH_USERNAME = "${MESH_USER}"
|
||||
MESH_SITE = "https://${MESH_HOST}"
|
||||
MESH_WS_URL="ws://meshcentral:443"
|
||||
MESH_TOKEN_KEY = "${MESH_TOKEN_KEY}"
|
||||
REDIS_HOST = "${REDIS_HOST}"
|
||||
SALT_HOST = "${SALT_HOST}"
|
||||
@@ -1,10 +0,0 @@
|
||||
#! /usr/bin/env bash
|
||||
|
||||
sleep 10
|
||||
python manage.py migrate --no-input
|
||||
python manage.py collectstatic --no-input
|
||||
python manage.py initial_db_setup
|
||||
python manage.py initial_mesh_setup
|
||||
python manage.py load_chocos
|
||||
python manage.py fix_salt_key
|
||||
python manage.py load_community_scripts
|
||||
@@ -1,14 +0,0 @@
|
||||
[uwsgi]
|
||||
|
||||
logto = /app/tacticalrmm/private/log/uwsgi.log
|
||||
chdir = /app
|
||||
wsgi-file = tacticalrmm/wsgi.py
|
||||
master = true
|
||||
processes = 4
|
||||
threads = 2
|
||||
socket = /app/tacticalrmm.sock
|
||||
# clear environment on exit
|
||||
vacuum = true
|
||||
die-on-term = true
|
||||
max-requests = 500
|
||||
max-requests-delta = 1000
|
||||
@@ -1,2 +0,0 @@
|
||||
PROD_URL = "https://${API_HOST}"
|
||||
DEV_URL = "https://${API_HOST}"
|
||||
@@ -1,16 +0,0 @@
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
#server_name ${APP_HOST};
|
||||
charset utf-8;
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
try_files $uri $uri/ /index.html;
|
||||
add_header Cache-Control "no-store, no-cache, must-revalidate";
|
||||
add_header Pragma "no-cache";
|
||||
}
|
||||
|
||||
error_log /var/log/nginx/app-error.log;
|
||||
access_log /var/log/nginx/app-access.log;
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
FROM node:12-alpine AS builder
|
||||
ARG APP_HOST
|
||||
ARG API_HOST
|
||||
EXPOSE 80
|
||||
WORKDIR /home/node
|
||||
RUN apk add gettext
|
||||
COPY ./web/package.json .
|
||||
RUN npm install
|
||||
COPY ./docker/app/.env.keep /home/.env.tmp
|
||||
RUN envsubst '\$API_HOST' < /home/.env.tmp > /home/node/.env && rm /home/.env.tmp
|
||||
COPY ./docker/app/app.conf /home/node/app.conf.tmp
|
||||
RUN envsubst '\$APP_HOST' < /home/node/app.conf.tmp > /home/node/app.conf
|
||||
COPY ./web .
|
||||
RUN npm run build
|
||||
|
||||
FROM nginx:alpine
|
||||
WORKDIR /usr/share/nginx/html
|
||||
COPY --from=builder /home/node/dist .
|
||||
COPY --from=builder /home/node/app.conf /etc/nginx/conf.d/default.conf
|
||||
29  docker/containers/tactical-frontend/dockerfile  (Normal file)
@@ -0,0 +1,29 @@
|
||||
FROM node:12-alpine AS builder
|
||||
|
||||
WORKDIR /home/node/app
|
||||
|
||||
COPY ./web/package.json .
|
||||
RUN npm install
|
||||
|
||||
COPY ./web .
|
||||
|
||||
# copy env file to set DOCKER_BUILD to true
|
||||
RUN echo "DOCKER_BUILD=1" > .env
|
||||
|
||||
# modify index.html template to allow injection of js variables at runtime
|
||||
RUN sed -i '/<\/head>/i <script src="\/env-config.js"><\/script>' src/index.template.html
|
||||
RUN npm run build
|
||||
|
||||
FROM nginx:stable-alpine
|
||||
|
||||
ENV PUBLIC_DIR /usr/share/nginx/html
|
||||
|
||||
RUN apk add --no-cache bash
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
COPY --from=builder /home/node/app/dist/ ${PUBLIC_DIR}
|
||||
|
||||
COPY docker/containers/tactical-frontend/entrypoint.sh /docker-entrypoint.d/
|
||||
RUN chmod +x /docker-entrypoint.d/entrypoint.sh
|
||||
|
||||
EXPOSE 80
|
||||
31  docker/containers/tactical-frontend/entrypoint.sh  (Normal file)
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# https://www.freecodecamp.org/news/how-to-implement-runtime-environment-variables-with-create-react-app-docker-and-nginx-7f9d42a91d70/
|
||||
#
|
||||
|
||||
# Recreate js config file on start
|
||||
rm -rf ${PUBLIC_DIR}/env-config.js
|
||||
touch ${PUBLIC_DIR}/env-config.js
|
||||
|
||||
# Add runtime base url assignment
|
||||
echo "window._env_ = {PROD_URL: \"https://${API_HOST}\"}" >> ${PUBLIC_DIR}/env-config.js
|
||||
|
||||
nginx_config="$(cat << EOF
|
||||
server {
|
||||
listen 80;
|
||||
charset utf-8;
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
try_files \$uri \$uri/ /index.html;
|
||||
add_header Cache-Control "no-store, no-cache, must-revalidate";
|
||||
add_header Pragma "no-cache";
|
||||
}
|
||||
|
||||
error_log /var/log/nginx/app-error.log;
|
||||
access_log /var/log/nginx/app-access.log;
|
||||
}
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${nginx_config}" > /etc/nginx/conf.d/default.conf
|
||||
21  docker/containers/tactical-meshcentral/dockerfile  (Normal file)
@@ -0,0 +1,21 @@
|
||||
FROM node:12-alpine
|
||||
|
||||
WORKDIR /home/node/app
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
|
||||
RUN apk add --no-cache bash
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
COPY api/tacticalrmm/tacticalrmm/settings.py /tmp/settings.py
|
||||
|
||||
RUN grep -o 'MESH_VER.*' /tmp/settings.py | cut -d'"' -f 2 > /tmp/MESH_VER && \
|
||||
npm install meshcentral@$(cat /tmp/MESH_VER)
|
||||
|
||||
COPY docker/containers/tactical-meshcentral/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
EXPOSE 80 443
|
||||
|
||||
ENTRYPOINT [ "/entrypoint.sh" ]
|
||||
72  docker/containers/tactical-meshcentral/entrypoint.sh  (Normal file)
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
: "${MESH_USER:=meshcentral}"
|
||||
: "${MESH_PASS:=meshcentralpass}"
|
||||
: "${MONGODB_USER:=mongouser}"
|
||||
: "${MONGODB_PASSWORD:=mongopass}"
|
||||
: "${MONGODB_HOST:=tactical-mongodb}"
|
||||
: "${MONGODB_PORT:=27017}"
|
||||
: "${NGINX_HOST_IP:=172.20.0.20}"
|
||||
|
||||
mkdir -p /home/node/app/meshcentral-data
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
|
||||
mesh_config="$(cat << EOF
|
||||
{
|
||||
"settings": {
|
||||
"mongodb": "mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_HOST}:${MONGODB_PORT}",
|
||||
"Cert": "${MESH_HOST}",
|
||||
"TLSOffload": "${NGINX_HOST_IP}",
|
||||
"RedirPort": 80,
|
||||
"WANonly": true,
|
||||
"Minify": 1,
|
||||
"Port": 443,
|
||||
"AllowLoginToken": true,
|
||||
"AllowFraming": true,
|
||||
"_AgentPing": 60,
|
||||
"AgentPong": 300,
|
||||
"AllowHighQualityDesktop": true,
|
||||
"MaxInvalidLogin": {
|
||||
"time": 5,
|
||||
"count": 5,
|
||||
"coolofftime": 30
|
||||
}
|
||||
},
|
||||
"domains": {
|
||||
"": {
|
||||
"Title": "Tactical RMM",
|
||||
"Title2": "TacticalRMM",
|
||||
"NewAccounts": false,
|
||||
"mstsc": true,
|
||||
"GeoLocation": true,
|
||||
"CertUrl": "https://${NGINX_HOST_IP}:443",
|
||||
"httpheaders": {
|
||||
"Strict-Transport-Security": "max-age=360000",
|
||||
"_x-frame-options": "sameorigin",
|
||||
"Content-Security-Policy": "default-src 'none'; script-src 'self' 'unsafe-inline'; connect-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-src 'self'; media-src 'self'"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${mesh_config}" > /home/node/app/meshcentral-data/config.json
|
||||
|
||||
node node_modules/meshcentral --createaccount ${MESH_USER} --pass ${MESH_PASS} --email example@example.com
|
||||
node node_modules/meshcentral --adminaccount ${MESH_USER}
|
||||
|
||||
if [ ! -f "${TACTICAL_DIR}/tmp/mesh_token" ]; then
|
||||
node node_modules/meshcentral --logintokenkey > ${TACTICAL_DIR}/tmp/mesh_token
|
||||
fi
|
||||
|
||||
# wait for nginx container
|
||||
until (echo > /dev/tcp/"${NGINX_HOST_IP}"/443) &> /dev/null; do
|
||||
echo "waiting for nginx to start..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
# start mesh
|
||||
node node_modules/meshcentral
|
||||
15  docker/containers/tactical-nats/dockerfile  (Normal file)
@@ -0,0 +1,15 @@
|
||||
FROM nats:2.1-alpine
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
|
||||
RUN apk add --no-cache inotify-tools supervisor bash
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
COPY docker/containers/tactical-nats/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT [ "/entrypoint.sh" ]
|
||||
|
||||
EXPOSE 4222
|
||||
40  docker/containers/tactical-nats/entrypoint.sh  (Normal file)
@@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
sleep 15
|
||||
until [ -f "${TACTICAL_READY_FILE}" ]; do
|
||||
echo "waiting for init container to finish install or update..."
|
||||
sleep 10
|
||||
done
|
||||
|
||||
mkdir -p /var/log/supervisor
|
||||
mkdir -p /etc/supervisor/conf.d
|
||||
|
||||
# wait for config changes
|
||||
|
||||
|
||||
supervisor_config="$(cat << EOF
|
||||
[supervisord]
|
||||
nodaemon=true
|
||||
[include]
|
||||
files = /etc/supervisor/conf.d/*.conf
|
||||
|
||||
[program:nats-server]
|
||||
command=nats-server --config ${TACTICAL_DIR}/api/nats-rmm.conf
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
|
||||
[program:config-watcher]
|
||||
command=/bin/bash -c "inotifywait -mq -e modify "${TACTICAL_DIR}/api/nats-rmm.conf" | while read event; do nats-server --signal reload; done;"
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${supervisor_config}" > /etc/supervisor/conf.d/supervisor.conf
|
||||
|
||||
# run supervised processes
|
||||
/usr/bin/supervisord -c /etc/supervisor/conf.d/supervisor.conf
|
||||
12  docker/containers/tactical-nginx/dockerfile  (Normal file)
@@ -0,0 +1,12 @@
|
||||
FROM nginx:stable-alpine
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
|
||||
RUN apk add --no-cache openssl bash
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
COPY docker/containers/tactical-nginx/entrypoint.sh /docker-entrypoint.d/
|
||||
RUN chmod +x /docker-entrypoint.d/entrypoint.sh
|
||||
|
||||
EXPOSE 443 80
|
||||
173  docker/containers/tactical-nginx/entrypoint.sh  (Normal file)
@@ -0,0 +1,173 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
CERT_PRIV_PATH=${TACTICAL_DIR}/certs/privkey.pem
|
||||
CERT_PUB_PATH=${TACTICAL_DIR}/certs/fullchain.pem
|
||||
|
||||
mkdir -p "${TACTICAL_DIR}/certs"
|
||||
|
||||
# remove default config
|
||||
rm -f /etc/nginx/conf.d/default.conf
|
||||
|
||||
# check for certificates in env variable
|
||||
if [ ! -z "$CERT_PRIV_KEY" ] && [ ! -z "$CERT_PUB_KEY" ]; then
|
||||
echo "${CERT_PRIV_KEY}" | base64 -d > ${CERT_PRIV_PATH}
|
||||
echo "${CERT_PUB_KEY}" | base64 -d > ${CERT_PUB_PATH}
|
||||
else
|
||||
# generate a self signed cert
|
||||
if [ ! -f "${CERT_PRIV_PATH}" ] || [ ! -f "${CERT_PUB_PATH}" ]; then
|
||||
rootdomain=$(echo ${API_HOST} | cut -d "." -f2- )
|
||||
openssl req -newkey rsa:4096 -x509 -sha256 -days 365 -nodes -out ${CERT_PUB_PATH} -keyout ${CERT_PRIV_PATH} -subj "/C=US/ST=Some-State/L=city/O=Internet Widgits Pty Ltd/CN=*.${rootdomain}"
|
||||
fi
|
||||
fi
|
||||
|
||||
nginx_config="$(cat << EOF
|
||||
# backend config
|
||||
server {
|
||||
resolver 127.0.0.11 valid=30s;
|
||||
|
||||
server_name ${API_HOST};
|
||||
|
||||
location / {
|
||||
#Using variable to disable start checks
|
||||
set \$api http://tactical-backend;
|
||||
|
||||
proxy_pass \$api;
|
||||
proxy_http_version 1.1;
|
||||
proxy_cache_bypass \$http_upgrade;
|
||||
|
||||
proxy_set_header Upgrade \$http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host \$host;
|
||||
proxy_set_header X-Real-IP \$remote_addr;
|
||||
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto \$scheme;
|
||||
proxy_set_header X-Forwarded-Host \$host;
|
||||
proxy_set_header X-Forwarded-Port \$server_port;
|
||||
}
|
||||
|
||||
location /static/ {
|
||||
root ${TACTICAL_DIR}/api;
|
||||
}
|
||||
|
||||
location /private/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
|
||||
alias ${TACTICAL_DIR}/api/tacticalrmm/private/;
|
||||
}
|
||||
|
||||
location /saltscripts/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
|
||||
alias ${TACTICAL_DIR}/scripts/userdefined/;
|
||||
}
|
||||
|
||||
location /builtin/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
|
||||
alias ${TACTICAL_DIR}/scripts/;
|
||||
}
|
||||
|
||||
error_log /var/log/nginx/api-error.log;
|
||||
access_log /var/log/nginx/api-access.log;
|
||||
|
||||
client_max_body_size 300M;
|
||||
|
||||
listen 443 ssl;
|
||||
ssl_certificate ${CERT_PUB_PATH};
|
||||
ssl_certificate_key ${CERT_PRIV_PATH};
|
||||
ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256';
|
||||
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name ${API_HOST};
|
||||
return 301 https://\$server_name\$request_uri;
|
||||
}
|
||||
|
||||
# frontend config
|
||||
server {
|
||||
resolver 127.0.0.11 valid=30s;
|
||||
|
||||
server_name ${APP_HOST};
|
||||
|
||||
location / {
|
||||
#Using variable to disable start checks
|
||||
set \$app http://tactical-frontend;
|
||||
|
||||
proxy_pass \$app;
|
||||
proxy_http_version 1.1;
|
||||
proxy_cache_bypass \$http_upgrade;
|
||||
|
||||
proxy_set_header Upgrade \$http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host \$host;
|
||||
proxy_set_header X-Real-IP \$remote_addr;
|
||||
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto \$scheme;
|
||||
proxy_set_header X-Forwarded-Host \$host;
|
||||
proxy_set_header X-Forwarded-Port \$server_port;
|
||||
}
|
||||
|
||||
error_log /var/log/nginx/app-error.log;
|
||||
access_log /var/log/nginx/app-access.log;
|
||||
|
||||
listen 443 ssl;
|
||||
ssl_certificate ${CERT_PUB_PATH};
|
||||
ssl_certificate_key ${CERT_PRIV_PATH};
|
||||
ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256';
|
||||
|
||||
}
|
||||
|
||||
server {
|
||||
|
||||
listen 80;
|
||||
server_name ${APP_HOST};
|
||||
return 301 https://\$server_name\$request_uri;
|
||||
}
|
||||
|
||||
# meshcentral config
|
||||
server {
|
||||
resolver 127.0.0.11 valid=30s;
|
||||
|
||||
listen 443 ssl;
|
||||
proxy_send_timeout 330s;
|
||||
proxy_read_timeout 330s;
|
||||
server_name ${MESH_HOST};
|
||||
ssl_certificate ${CERT_PUB_PATH};
|
||||
ssl_certificate_key ${CERT_PRIV_PATH};
|
||||
ssl_session_cache shared:WEBSSL:10m;
|
||||
ssl_ciphers HIGH:!aNULL:!MD5;
|
||||
ssl_prefer_server_ciphers on;
|
||||
|
||||
location / {
|
||||
#Using variable to disable start checks
|
||||
set \$meshcentral http://tactical-meshcentral:443;
|
||||
|
||||
proxy_pass \$meshcentral;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Upgrade \$http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
|
||||
proxy_set_header Host \$host;
|
||||
proxy_set_header X-Real-IP \$remote_addr;
|
||||
proxy_set_header X-Forwarded-Host \$host:\$server_port;
|
||||
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto \$scheme;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
resolver 127.0.0.11 valid=30s;
|
||||
|
||||
listen 80;
|
||||
server_name ${MESH_HOST};
|
||||
return 301 https://\$server_name\$request_uri;
|
||||
}
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${nginx_config}" > /etc/nginx/conf.d/default.conf
|
||||
24  docker/containers/tactical-salt/dockerfile  (Normal file)
@@ -0,0 +1,24 @@
|
||||
FROM ubuntu:20.04
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
ENV SALT_USER saltapi
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y ca-certificates wget gnupg2 tzdata supervisor && \
|
||||
wget -O - https://repo.saltstack.com/py3/ubuntu/20.04/amd64/latest/SALTSTACK-GPG-KEY.pub | apt-key add - && \
|
||||
echo 'deb http://repo.saltstack.com/py3/ubuntu/20.04/amd64/latest focal main' | tee /etc/apt/sources.list.d/saltstack.list && \
|
||||
apt-get update && \
|
||||
apt-get install -y salt-master salt-api && \
|
||||
mkdir -p /var/log/supervisor && \
|
||||
sed -i 's/msgpack_kwargs = {"raw": six.PY2}/msgpack_kwargs = {"raw": six.PY2, "max_buffer_size": 2147483647}/g' /usr/lib/python3/dist-packages/salt/transport/ipc.py && \
|
||||
adduser --no-create-home --disabled-password --gecos "" ${SALT_USER}
|
||||
|
||||
EXPOSE 8123 4505 4506
|
||||
|
||||
COPY docker/containers/tactical-salt/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT [ "/entrypoint.sh" ]
|
||||
64  docker/containers/tactical-salt/entrypoint.sh  (Normal file)
@@ -0,0 +1,64 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
: "${SALT_USER:='saltapi'}"
|
||||
|
||||
sleep 15
|
||||
until [ -f "${TACTICAL_READY_FILE}" ]; do
|
||||
echo "waiting for init container to finish install or update..."
|
||||
sleep 10
|
||||
done
|
||||
|
||||
SALT_PASS=$(cat ${TACTICAL_DIR}/tmp/salt_pass)
|
||||
|
||||
echo "${SALT_USER}:${SALT_PASS}" | chpasswd
|
||||
|
||||
cherrypy_config="$(cat << EOF
|
||||
file_roots:
|
||||
base:
|
||||
- /srv/salt
|
||||
- ${TACTICAL_DIR}
|
||||
timeout: 20
|
||||
gather_job_timeout: 25
|
||||
max_event_size: 30485760
|
||||
external_auth:
|
||||
pam:
|
||||
${SALT_USER}:
|
||||
- .*
|
||||
- '@runner'
|
||||
- '@wheel'
|
||||
- '@jobs'
|
||||
rest_cherrypy:
|
||||
port: 8123
|
||||
disable_ssl: True
|
||||
max_request_body_size: 30485760
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${cherrypy_config}" > /etc/salt/master.d/rmm-salt.conf
|
||||
|
||||
supervisor_config="$(cat << EOF
|
||||
[supervisord]
|
||||
nodaemon=true
|
||||
[include]
|
||||
files = /etc/supervisor/conf.d/*.conf
|
||||
|
||||
[program:salt-master]
|
||||
command=/bin/bash -c "salt-master -l info"
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
|
||||
[program:salt-api]
|
||||
command=/bin/bash -c "salt-api -l info"
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${supervisor_config}" > /etc/supervisor/conf.d/supervisor.conf
|
||||
|
||||
# run salt and salt master
|
||||
/usr/bin/supervisord
|
||||
69  docker/containers/tactical/dockerfile  (Normal file)
@@ -0,0 +1,69 @@
|
||||
# creates python virtual env
|
||||
FROM python:3.8-slim AS CREATE_VENV_STAGE
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# # set env variables
|
||||
ENV VIRTUAL_ENV /opt/venv
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_TMP_DIR /tmp/tactical
|
||||
RUN python3 -m venv $VIRTUAL_ENV
|
||||
ENV PATH "${VIRTUAL_ENV}/bin:$PATH"
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
COPY api/tacticalrmm/requirements.txt ${TACTICAL_TMP_DIR}/api/requirements.txt
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends gcc libc6-dev && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
pip install --upgrade pip && \
|
||||
pip install --no-cache-dir setuptools wheel gunicorn && \
|
||||
sed -i '/uWSGI/d' ${TACTICAL_TMP_DIR}/api/requirements.txt && \
|
||||
pip install --no-cache-dir -r ${TACTICAL_TMP_DIR}/api/requirements.txt
|
||||
|
||||
|
||||
# runtime image
|
||||
FROM python:3.8-slim
|
||||
|
||||
# set env variables
|
||||
ENV VIRTUAL_ENV /opt/venv
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_TMP_DIR /tmp/tactical
|
||||
ENV TACTICAL_GO_DIR /usr/local/rmmgo
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
ENV TACTICAL_USER tactical
|
||||
ENV PATH "${VIRTUAL_ENV}/bin:${TACTICAL_GO_DIR}/go/bin:$PATH"
|
||||
|
||||
# copy files from repo
|
||||
COPY api/tacticalrmm ${TACTICAL_TMP_DIR}/api
|
||||
COPY scripts ${TACTICAL_TMP_DIR}/scripts
|
||||
COPY _modules ${TACTICAL_TMP_DIR}/_modules
|
||||
|
||||
# copy go install from build stage
|
||||
COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
|
||||
COPY --from=CREATE_VENV_STAGE ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
||||
|
||||
# install deps
|
||||
RUN apt-get update && \
|
||||
apt-get upgrade -y && \
|
||||
apt-get install -y --no-install-recommends git && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
go get github.com/josephspurrier/goversioninfo/cmd/goversioninfo && \
|
||||
groupadd -g 1000 "${TACTICAL_USER}" && \
|
||||
useradd -M -d "${TACTICAL_DIR}" -s /bin/bash -u 1000 -g 1000 "${TACTICAL_USER}"
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
# overwrite goversioninfo file
|
||||
COPY api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
|
||||
RUN chmod +x /usr/local/bin/goversioninfo
|
||||
|
||||
# docker init
|
||||
COPY docker/containers/tactical/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
WORKDIR ${TACTICAL_DIR}/api
|
||||
|
||||
EXPOSE 80
|
||||
183
docker/containers/tactical/entrypoint.sh
Normal file
@@ -0,0 +1,183 @@
#!/usr/bin/env bash

set -e

: "${TRMM_USER:=tactical}"
: "${TRMM_PASS:=tactical}"
: "${POSTGRES_HOST:=tactical-postgres}"
: "${POSTGRES_PORT:=5432}"
: "${POSTGRES_USER:=tactical}"
: "${POSTGRES_PASS:=tactical}"
: "${POSTGRES_DB:=tacticalrmm}"
: "${SALT_HOST:=tactical-salt}"
: "${SALT_USER:=saltapi}"
: "${MESH_CONTAINER:=tactical-meshcentral}"
: "${MESH_USER:=meshcentral}"
: "${MESH_PASS:=meshcentralpass}"
: "${MESH_HOST:=tactical-meshcentral}"
: "${API_HOST:=tactical-backend}"
: "${APP_HOST:=tactical-frontend}"
: "${REDIS_HOST:=tactical-redis}"


function check_tactical_ready {
  sleep 15
  until [ -f "${TACTICAL_READY_FILE}" ]; do
    echo "waiting for init container to finish install or update..."
    sleep 10
  done
}

# tactical-init
if [ "$1" = 'tactical-init' ]; then

mkdir -p ${TACTICAL_DIR}/tmp
mkdir -p ${TACTICAL_DIR}/scripts/userdefined

test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"

# copy container data to volume
cp -af ${TACTICAL_TMP_DIR}/. ${TACTICAL_DIR}/

until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
  echo "waiting for postgresql container to be ready..."
  sleep 5
done

until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
  echo "waiting for meshcentral container to be ready..."
  sleep 5
done

# configure django settings
MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
ADMINURL=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 70 | head -n 1)
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)

# write salt pass to tmp dir
if [ ! -f "${TACTICAL_DIR}/tmp/salt_pass" ]; then
  SALT_PASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
  echo "${SALT_PASS}" > ${TACTICAL_DIR}/tmp/salt_pass
else
  SALT_PASS=$(cat ${TACTICAL_DIR}/tmp/salt_pass)
fi

localvars="$(cat << EOF
SECRET_KEY = '${DJANGO_SEKRET}'

DEBUG = False

DOCKER_BUILD = True

CERT_FILE = '/opt/tactical/certs/fullchain.pem'
KEY_FILE = '/opt/tactical/certs/privkey.pem'

SCRIPTS_DIR = '/opt/tactical/scripts'

ALLOWED_HOSTS = ['${API_HOST}']

ADMIN_URL = '${ADMINURL}/'

CORS_ORIGIN_WHITELIST = [
    'https://${APP_HOST}'
]

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': '${POSTGRES_DB}',
        'USER': '${POSTGRES_USER}',
        'PASSWORD': '${POSTGRES_PASS}',
        'HOST': '${POSTGRES_HOST}',
        'PORT': '${POSTGRES_PORT}',
    }
}

REST_FRAMEWORK = {
    'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',

    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'knox.auth.TokenAuthentication',
    ),
}

if not DEBUG:
    REST_FRAMEWORK.update({
        'DEFAULT_RENDERER_CLASSES': (
            'rest_framework.renderers.JSONRenderer',
        )
    })

SALT_USERNAME = '${SALT_USER}'
SALT_PASSWORD = '${SALT_PASS}'
SALT_HOST = '${SALT_HOST}'
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'
REDIS_HOST = '${REDIS_HOST}'
MESH_WS_URL = 'ws://${MESH_CONTAINER}:443'
EOF
)"

echo "${localvars}" > ${TACTICAL_DIR}/api/tacticalrmm/local_settings.py

# run migrations and init scripts
python manage.py migrate --no-input
python manage.py collectstatic --no-input
python manage.py initial_db_setup
python manage.py initial_mesh_setup
python manage.py load_chocos
python manage.py load_community_scripts
python manage.py reload_nats

# create super user
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell

# chown everything to tactical user
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"

# create install ready file
su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"

fi

# backend container
if [ "$1" = 'tactical-backend' ]; then
check_tactical_ready

# Prepare log files and start outputting logs to stdout
mkdir -p ${TACTICAL_DIR}/api/tacticalrmm/logs
touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log
touch ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log
tail -n 0 -f ${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn*.log &

export DJANGO_SETTINGS_MODULE=tacticalrmm.settings

exec gunicorn tacticalrmm.wsgi:application \
  --name tactical-backend \
  --bind 0.0.0.0:80 \
  --workers 5 \
  --log-level=info \
  --log-file=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn.log \
  --access-logfile=${TACTICAL_DIR}/api/tacticalrmm/logs/gunicorn-access.log

fi

if [ "$1" = 'tactical-celery' ]; then
check_tactical_ready
celery -A tacticalrmm worker
fi

if [ "$1" = 'tactical-celerybeat' ]; then
check_tactical_ready
test -f "${TACTICAL_DIR}/api/celerybeat.pid" && rm "${TACTICAL_DIR}/api/celerybeat.pid"
celery -A tacticalrmm beat
fi

if [ "$1" = 'tactical-celerywinupdate' ]; then
check_tactical_ready
celery -A tacticalrmm worker -Q wupdate
fi
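
One image thus backs every Django-side service: the first argument selects a code path above, and the compose file below passes it through each service's command. A quick way to confirm a finished init, assuming the stack is running with the service and volume names from that compose file:

    # the ready file and the generated settings live on the shared tactical_data volume
    docker-compose exec tactical-backend ls -l /opt/tactical/tmp/tactical.ready
    docker-compose exec tactical-backend cat /opt/tactical/api/tacticalrmm/local_settings.py
    # roles accepted by this entrypoint:
    #   tactical-init | tactical-backend | tactical-celery | tactical-celerybeat | tactical-celerywinupdate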
@@ -1,99 +0,0 @@
# FOR DEV
version: "3.7"

services:
  # Container that hosts Vue frontend
  app:
    image: node:12
    command: /bin/bash -c "npm install && npm run serve -- --host 0.0.0.0 --port 80 --public ${APP_HOST}"
    working_dir: /home/node
    volumes:
      - ../web:/home/node
    networks:
      - proxy

  # Builds Python Virtual Env to share between containers
  venv:
    image: python:3.8
    command: /bin/bash -c "pip install virtualenv && python -m virtualenv env && ./env/bin/pip install -r requirements.txt && ./env/bin/pip install -r requirements-dev.txt"
    working_dir: /app
    volumes:
      - ../api/tacticalrmm:/app

  # Container for Django backend
  api:
    image: python:3.8
    command: /bin/bash -c "python manage.py collectstatic --clear --no-input && python manage.py migrate && sleep 10s && python manage.py initial_db_setup && python manage.py initial_mesh_setup && python manage.py load_chocos && python manage.py runserver 0.0.0.0:80"
    working_dir: /app
    environment:
      VIRTUAL_ENV: /app/env
      PATH: /app/env/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
    networks:
      - proxy
      - database
      - redis
    volumes:
      - scripts:/srv
      - mesh_token:/token
      - ../api/tacticalrmm:/app
    depends_on:
      - db
      - venv
      - meshcentral

  # Container for Celery worker service
  celery-service:
    image: python:3.8
    command: celery -A tacticalrmm worker -l debug
    working_dir: /app
    environment:
      VIRTUAL_ENV: /app/env
      PATH: /app/env/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
    volumes:
      - ../api/tacticalrmm:/app
    networks:
      - redis
      - proxy
      - database
    depends_on:
      - db
      - redis
      - venv

  # Container for Celery beat service
  celery-beat:
    image: python:3.8
    command: celery -A tacticalrmm beat -l debug
    working_dir: /app
    environment:
      VIRTUAL_ENV: /app/env
      PATH: /app/env/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
    volumes:
      - ../api/tacticalrmm:/app
    networks:
      - redis
      - proxy
      - database
    depends_on:
      - db
      - redis
      - venv

  # Container for Celery Winupdate tasks
  celery-winupdate:
    image: python:3.8
    command: celery -A tacticalrmm worker -Q wupdate -l debug
    working_dir: /app
    environment:
      VIRTUAL_ENV: /app/env
      PATH: /app/env/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
    volumes:
      - ../api/tacticalrmm:/app
    networks:
      - redis
      - proxy
      - database
    depends_on:
      - db
      - redis
      - venv
@@ -1,143 +0,0 @@
# FOR PROD
version: "3.7"

volumes:
  # Gives access to the debug log for celery tasks
  debug_log:

services:
  # Container that hosts Vue frontend
  app:
    build:
      context: ..
      args:
        - APP_HOST=${APP_HOST}
        - API_HOST=${API_HOST}
      dockerfile: "./docker/app/dockerfile"
    networks:
      - proxy

  # Container for Django backend
  api:
    build:
      context: ..
      dockerfile: "./docker/api/dockerfile"
      args:
        - DJANGO_SEKRET=${DJANGO_SEKRET}
        - DJANGO_DEBUG=${DJANGO_DEBUG}
        - POSTGRES_USER=${POSTGRES_USER}
        - POSTGRES_PASS=${POSTGRES_PASS}
        - POSTGRES_HOST=${POSTGRES_HOST}
        - SALT_PASS=${SALT_PASS}
        - SALT_USER=${SALT_USER}
        - SALT_HOST=${SALT_HOST}
        - REDIS_HOST=${REDIS_HOST}
        - MESH_USER=${MESH_USER}
        - MESH_HOST=${MESH_HOST}
        - APP_HOST=${APP_HOST}
        - API_HOST=${API_HOST}
        - ADMIN_URL=${ADMIN_URL}
    networks:
      - proxy
      - database
      - redis
    volumes:
      - scripts:/srv
      - mesh_token:/token
      - debug_log:/app/tacticalrmm/private/log
    depends_on:
      - db
      - meshcentral

  # Container for Celery worker service
  celery-service:
    build:
      context: ..
      dockerfile: "./docker/api/dockerfile"
      args:
        - DJANGO_SEKRET=${DJANGO_SEKRET}
        - DJANGO_DEBUG=${DJANGO_DEBUG}
        - POSTGRES_USER=${POSTGRES_USER}
        - POSTGRES_PASS=${POSTGRES_PASS}
        - POSTGRES_HOST=${POSTGRES_HOST}
        - SALT_PASS=${SALT_PASS}
        - SALT_USER=${SALT_USER}
        - SALT_HOST=${SALT_HOST}
        - REDIS_HOST=${REDIS_HOST}
        - MESH_USER=${MESH_USER}
        - MESH_HOST=${MESH_HOST}
        - APP_HOST=${APP_HOST}
        - API_HOST=${API_HOST}
        - ADMIN_URL=${ADMIN_URL}
    command: celery -A tacticalrmm worker -l debug
    networks:
      - redis
      - proxy
      - database
    volumes:
      - debug_log:/app/tacticalrmm/private/log
    depends_on:
      - db
      - redis

  # Container for Celery beat service
  celery-beat:
    build:
      context: ..
      dockerfile: "./docker/api/dockerfile"
      args:
        - DJANGO_SEKRET=${DJANGO_SEKRET}
        - DJANGO_DEBUG=${DJANGO_DEBUG}
        - POSTGRES_USER=${POSTGRES_USER}
        - POSTGRES_PASS=${POSTGRES_PASS}
        - POSTGRES_HOST=${POSTGRES_HOST}
        - SALT_PASS=${SALT_PASS}
        - SALT_USER=${SALT_USER}
        - SALT_HOST=${SALT_HOST}
        - REDIS_HOST=${REDIS_HOST}
        - MESH_USER=${MESH_USER}
        - MESH_HOST=${MESH_HOST}
        - APP_HOST=${APP_HOST}
        - API_HOST=${API_HOST}
        - ADMIN_URL=${ADMIN_URL}
    command: celery -A tacticalrmm beat -l debug
    networks:
      - redis
      - proxy
      - database
    volumes:
      - debug_log:/app/tacticalrmm/private/log
    depends_on:
      - db
      - redis

  # Container for Celery Winupdate tasks
  celery-winupdate:
    build:
      context: ..
      dockerfile: "./docker/api/dockerfile"
      args:
        - DJANGO_SEKRET=${DJANGO_SEKRET}
        - DJANGO_DEBUG=${DJANGO_DEBUG}
        - POSTGRES_USER=${POSTGRES_USER}
        - POSTGRES_PASS=${POSTGRES_PASS}
        - POSTGRES_HOST=${POSTGRES_HOST}
        - SALT_PASS=${SALT_PASS}
        - SALT_USER=${SALT_USER}
        - SALT_HOST=${SALT_HOST}
        - REDIS_HOST=${REDIS_HOST}
        - MESH_USER=${MESH_USER}
        - MESH_HOST=${MESH_HOST}
        - APP_HOST=${APP_HOST}
        - API_HOST=${API_HOST}
        - ADMIN_URL=${ADMIN_URL}
    command: celery -A tacticalrmm worker -Q wupdate -l debug
    networks:
      - redis
      - proxy
      - database
    volumes:
      - debug_log:/app/tacticalrmm/private/log
    depends_on:
      - db
      - redis
@@ -1,6 +1,6 @@
version: "3.7"

# Userdefined Networks
# networks
networks:
  proxy:
    driver: bridge
@@ -8,102 +8,205 @@ networks:
      driver: default
      config:
        - subnet: 172.20.0.0/24
  database:
  api-db:
  redis:
  mesh-mongodb:
  mesh-db:

# Docker managed persistent volumes
# docker managed persistent volumes
volumes:
  # Volume for userdefined scripts
  scripts:
  # Volume for mesh token initial setup
  mesh_token:
  # Used to make the salt data persistent
  tactical_data:
  salt_data:
  # Makes Postgres data persistent
  postgres_data13:
  # Makes mesh central data persistent
  postgres_data:
  mongo_data:
  mesh_data:

services:
  # Postgres Database for API service
  db:
    image: postgres:13
  # postgres database for api service
  tactical-postgres:
    image: postgres:13-alpine
    restart: always
    environment:
      POSTGRES_DB: tacticalrmm
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASS}
    volumes:
      - postgres_data13:/var/lib/postgresql/data
      - postgres_data:/var/lib/postgresql/data
    networks:
      - database
      - api-db

  # Redis Container for Celery tasks
  redis:
    image: redis
  # redis container for celery tasks
  tactical-redis:
    image: redis:6.0-alpine
    restart: always
    networks:
      - redis

  # Salt Master and API
  salt:
    build:
      context: ..
      dockerfile: ./docker/salt/dockerfile
      args:
        - SALT_USER=${SALT_USER}
        - SALT_PASS=${SALT_PASS}
  # used to initialize the docker environment
  tactical-init:
    image: ${IMAGE_REPO}tactical:${VERSION}
    restart: on-failure
    command: ["tactical-init"]
    environment:
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASS: ${POSTGRES_PASS}
      APP_HOST: ${APP_HOST}
      API_HOST: ${API_HOST}
      MESH_USER: ${MESH_USER}
      MESH_HOST: ${MESH_HOST}
      TRMM_USER: ${TRMM_USER}
      TRMM_PASS: ${TRMM_PASS}
    depends_on:
      - tactical-postgres
      - tactical-meshcentral
    networks:
      - api-db
      - proxy
    volumes:
      - tactical_data:/opt/tactical

  # salt master and api
  tactical-salt:
    image: ${IMAGE_REPO}tactical-salt:${VERSION}
    restart: always
    ports:
      - "4505:4505"
      - "4506:4506"
    volumes:
      - scripts:/srv
      - tactical_data:/opt/tactical
      - salt_data:/etc/salt
    networks:
      - proxy

  # MeshCentral Container
  meshcentral:
    build:
      context: ./meshcentral
      args:
        - MESH_HOST=${MESH_HOST}
        - MESH_USER=${MESH_USER}
        - MESH_PASS=${MESH_PASS}
        - EMAIL_USER=${EMAIL_USER}
        - MONGODB_USER=${MONGODB_USER}
        - MONGODB_PASSWORD=${MONGODB_PASSWORD}
    networks:
      - proxy
      - mesh-mongodb

  # nats
  tactical-nats:
    image: ${IMAGE_REPO}tactical-nats:${VERSION}
    restart: always
    ports:
      - "4222:4222"
    volumes:
      - mesh_token:/token
    depends_on:
      - mesh-mongodb
      - nginx-proxy
      - tactical_data:/opt/tactical
    networks:
      proxy:
        aliases:
          - ${API_HOST}

  # MongoDB Container for MeshCentral
  mesh-mongodb:
    image: mongo
  # meshcentral container
  tactical-meshcentral:
    image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
    restart: always
    environment:
      MESH_HOST: ${MESH_HOST}
      MESH_USER: ${MESH_USER}
      MESH_PASS: ${MESH_PASS}
      MONGODB_USER: ${MONGODB_USER}
      MONGODB_PASSWORD: ${MONGODB_PASSWORD}
    networks:
      proxy:
        aliases:
          - ${MESH_HOST}
      mesh-db:
    volumes:
      - tactical_data:/opt/tactical
      - mesh_data:/home/node/app/meshcentral-data
    depends_on:
      - tactical-mongodb

  # mongodb container for meshcentral
  tactical-mongodb:
    image: mongo:4.4
    restart: always
    environment:
      MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
      MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
      MONGO_INITDB_DATABASE: meshcentral
    networks:
      - mesh-mongodb
      - mesh-db
    volumes:
      - mongo_data:/data
      - mongo_data:/data/db

  # Nginx Container Reverse Proxy that handles all http/https traffic
  nginx-proxy:
    build:
      context: ./nginx-proxy
      args:
        - APP_HOST=${APP_HOST}
        - API_HOST=${API_HOST}
        - MESH_HOST=${MESH_HOST}
    ports:
      - "80:80"
      - "443:443"
  # container that hosts vue frontend
  tactical-frontend:
    image: ${IMAGE_REPO}tactical-frontend:${VERSION}
    restart: always
    networks:
      - proxy
    environment:
      API_HOST: ${API_HOST}

  # container for django backend
  tactical-backend:
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-backend"]
    restart: always
    networks:
      - proxy
      - api-db
      - redis
    volumes:
      - tactical_data:/opt/tactical
    depends_on:
      - tactical-postgres

  tactical-nginx:
  # container for tactical reverse proxy
    image: ${IMAGE_REPO}tactical-nginx:${VERSION}
    restart: always
    environment:
      APP_HOST: ${APP_HOST}
      API_HOST: ${API_HOST}
      MESH_HOST: ${MESH_HOST}
      CERT_PUB_KEY: ${CERT_PUB_KEY}
      CERT_PRIV_KEY: ${CERT_PRIV_KEY}
    networks:
      proxy:
        ipv4_address: 172.20.0.20
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - tactical_data:/opt/tactical

  # container for celery worker service
  tactical-celery:
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-celery"]
    restart: always
    networks:
      - redis
      - proxy
      - api-db
    volumes:
      - tactical_data:/opt/tactical
    depends_on:
      - tactical-postgres
      - tactical-redis

  # container for celery beat service
  tactical-celerybeat:
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-celerybeat"]
    restart: always
    networks:
      - proxy
      - redis
      - api-db
    volumes:
      - tactical_data:/opt/tactical
    depends_on:
      - tactical-postgres
      - tactical-redis

  # container for celery winupdate tasks
  tactical-celerywinupdate:
    image: ${IMAGE_REPO}tactical:${VERSION}
    command: ["tactical-celerywinupdate"]
    restart: always
    networks:
      - redis
      - proxy
      - api-db
    volumes:
      - tactical_data:/opt/tactical
    depends_on:
      - tactical-postgres
      - tactical-redis
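
A hedged sketch of bringing this stack up locally: every variable name below is referenced in the compose file above, the values are placeholders, and an empty IMAGE_REPO with VERSION=latest lines up with the locally built tags produced by image-build.sh (next file). How the nginx image consumes the cert variables is not shown in this diff, so those two lines are only markers:

    # .env consumed by docker-compose (placeholder values; adjust for your environment)
    cat > .env << 'EOF'
    IMAGE_REPO=
    VERSION=latest
    APP_HOST=rmm.example.com
    API_HOST=api.example.com
    MESH_HOST=mesh.example.com
    MESH_USER=meshcentral
    MESH_PASS=meshcentralpass
    MONGODB_USER=mongouser
    MONGODB_PASSWORD=mongopass
    POSTGRES_USER=postgres
    POSTGRES_PASS=postgrespass
    TRMM_USER=tactical
    TRMM_PASS=tactical
    CERT_PUB_KEY=<as expected by the tactical-nginx image>
    CERT_PRIV_KEY=<as expected by the tactical-nginx image>
    EOF

    docker-compose up -d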
13
docker/image-build.sh
Executable file
@@ -0,0 +1,13 @@
#!/usr/bin/env bash

set -o errexit
set -o pipefail

DOCKER_IMAGES="tactical tactical-frontend tactical-nginx tactical-meshcentral tactical-salt tactical-nats"

cd ..

for DOCKER_IMAGE in ${DOCKER_IMAGES}; do
  echo "Building Tactical Image: ${DOCKER_IMAGE}..."
  docker build --pull --no-cache -t "${DOCKER_IMAGE}" -f "docker/containers/${DOCKER_IMAGE}/dockerfile" .
done
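
The `cd ..` implies the script is meant to be run from the docker/ directory, so the repo root becomes the build context for every image. For example:

    cd docker
    ./image-build.sh
    # produces local tags tactical, tactical-frontend, tactical-nginx, tactical-meshcentral,
    # tactical-salt and tactical-nats, which resolve to the compose image references above
    # when IMAGE_REPO is empty and VERSION=latest
    docker images | grep tactical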
@@ -1,36 +0,0 @@
{
  "settings": {
    "mongodb": "mongodb://${MONGODB_USER}:${MONGODB_PASSWORD}@mesh-mongodb:27017",
    "Cert": "${MESH_HOST}",
    "TLSOffload": "172.20.0.20",
    "RedirPort": 80,
    "WANonly": true,
    "Minify": 1,
    "Port": 443,
    "AllowLoginToken": true,
    "AllowFraming": true,
    "_AgentPing": 60,
    "AgentPong": 300,
    "AllowHighQualityDesktop": true,
    "MaxInvalidLogin": {
      "time": 5,
      "count": 5,
      "coolofftime": 30
    }
  },
  "domains": {
    "": {
      "Title": "Dev RMM",
      "Title2": "DevRMM",
      "NewAccounts": false,
      "mstsc": true,
      "GeoLocation": true,
      "CertUrl": "https://172.20.0.20:443",
      "httpheaders": {
        "Strict-Transport-Security": "max-age=360000",
        "_x-frame-options": "sameorigin",
        "Content-Security-Policy": "default-src 'none'; script-src 'self' 'unsafe-inline'; connect-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-src 'self'; media-src 'self'"
      }
    }
  }
}
Some files were not shown because too many files have changed in this diff.