Compare commits
56 Commits
| SHA1 |
|---|
| b5c28de03f |
| e17d25c156 |
| c25dc1b99c |
| a493a574bd |
| 4284493dce |
| 25059de8e1 |
| 1731b05ad0 |
| e80dc663ac |
| 39988a4c2f |
| 415bff303a |
| a65eb62a54 |
| 03b2982128 |
| bff0527857 |
| f3b7634254 |
| 6a9593c0b9 |
| edb785b8e5 |
| 26d757b50a |
| 535079ee87 |
| ac380c29c1 |
| 3fd212f26c |
| 04a3abc651 |
| 6caf85ddd1 |
| 16e4071508 |
| 69e7c4324b |
| a1c4a8cbe5 |
| e37f6cfda7 |
| 989c804409 |
| 7345bc3c82 |
| 69bee35700 |
| 598e24df7c |
| 0ae669201e |
| f52a8a4642 |
| 9c40b61ef2 |
| 72dabcda83 |
| 161a06dbcc |
| 8ed3d4e70c |
| a4223ccc8a |
| ca85923855 |
| 52bfe7c493 |
| 4786bd0cbe |
| cadab160ff |
| 6a7f17b2b0 |
| 4986a4d775 |
| 903af0c2cf |
| 3282fa803c |
| 67cc47608d |
| 0411704b8b |
| 1de85b2c69 |
| 33b012f29d |
| 1357584df3 |
| e15809e271 |
| 0da1950427 |
| e590b921be |
| aba1662631 |
| 61eeb60c19 |
| 2655964113 |

@@ -32,21 +32,6 @@ services:
        aliases:
          - tactical-frontend

  # salt master and api
  salt-dev:
    image: ${IMAGE_REPO}tactical-salt:${VERSION}
    restart: always
    volumes:
      - tactical-data-dev:/opt/tactical
      - salt-data-dev:/etc/salt
    ports:
      - "4505:4505"
      - "4506:4506"
    networks:
      dev:
        aliases:
          - tactical-salt

  # nats
  nats-dev:
    image: ${IMAGE_REPO}tactical-nats:${VERSION}

@@ -188,23 +173,6 @@ services:
      - postgres-dev
      - redis-dev

  # container for celery winupdate tasks
  celerywinupdate-dev:
    image: api-dev
    build:
      context: .
      dockerfile: ./api.dockerfile
    command: ["tactical-celerywinupdate-dev"]
    restart: always
    networks:
      - dev
    volumes:
      - tactical-data-dev:/opt/tactical
      - ..:/workspace:cached
    depends_on:
      - postgres-dev
      - redis-dev

  nginx-dev:
    # container for tactical reverse proxy
    image: ${IMAGE_REPO}tactical-nginx:${VERSION}

@@ -231,7 +199,6 @@ volumes:
  postgres-data-dev:
  mongo-dev-data:
  mesh-data-dev:
  salt-data-dev:

networks:
  dev:

@@ -9,8 +9,6 @@ set -e
: "${POSTGRES_USER:=tactical}"
: "${POSTGRES_PASS:=tactical}"
: "${POSTGRES_DB:=tacticalrmm}"
: "${SALT_HOST:=tactical-salt}"
: "${SALT_USER:=saltapi}"
: "${MESH_CONTAINER:=tactical-meshcentral}"
: "${MESH_USER:=meshcentral}"
: "${MESH_PASS:=meshcentralpass}"

@@ -50,14 +48,6 @@ function django_setup {
  MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)

  DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)

  # write salt pass to tmp dir
  if [ ! -f "${TACTICAL_DIR}/tmp/salt_pass" ]; then
    SALT_PASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
    echo "${SALT_PASS}" > ${TACTICAL_DIR}/tmp/salt_pass
  else
    SALT_PASS=$(cat ${TACTICAL_DIR}/tmp/salt_pass)
  fi

  localvars="$(cat << EOF
SECRET_KEY = '${DJANGO_SEKRET}'

@@ -106,9 +96,6 @@ if not DEBUG:
    )
  })

SALT_USERNAME = '${SALT_USER}'
SALT_PASSWORD = '${SALT_PASS}'
SALT_HOST = '${SALT_HOST}'
MESH_USERNAME = '${MESH_USER}'
MESH_SITE = 'https://${MESH_HOST}'
MESH_TOKEN_KEY = '${MESH_TOKEN}'

@@ -180,8 +167,3 @@ if [ "$1" = 'tactical-celerybeat-dev' ]; then
  test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
  env/bin/celery -A tacticalrmm beat -l debug
fi

if [ "$1" = 'tactical-celerywinupdate-dev' ]; then
  check_tactical_ready
  env/bin/celery -A tacticalrmm worker -Q wupdate -l debug
fi

.github/workflows/docker-build-push.yml

@@ -57,16 +57,6 @@ jobs:
          platforms: linux/amd64
          tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest

      - name: Build and Push Tactical Salt Image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          pull: true
          file: ./docker/containers/tactical-salt/dockerfile
          platforms: linux/amd64
          tags: tacticalrmm/tactical-salt:${{ steps.prep.outputs.version }},tacticalrmm/tactical-salt:latest

      - name: Build and Push Tactical Frontend Image
        uses: docker/build-push-action@v2
        with:

@@ -6,7 +6,7 @@
[](https://github.com/python/black)

Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)

# [LIVE DEMO](https://rmm.xlawgaming.com/)
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.

@@ -62,7 +62,6 @@ sudo ufw default allow outgoing
sudo ufw allow ssh
sudo ufw allow http
sudo ufw allow https
sudo ufw allow proto tcp from any to any port 4505,4506
sudo ufw allow proto tcp from any to any port 4222
sudo ufw enable && sudo ufw reload
```
@@ -1,457 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
import psutil
|
||||
import os
|
||||
import datetime
|
||||
import zlib
|
||||
import json
|
||||
import base64
|
||||
import wmi
|
||||
import win32evtlog
|
||||
import win32con
|
||||
import win32evtlogutil
|
||||
import winerror
|
||||
from time import sleep
|
||||
import requests
|
||||
import subprocess
|
||||
import random
|
||||
import platform
|
||||
|
||||
ARCH = "64" if platform.machine().endswith("64") else "32"
|
||||
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
|
||||
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
|
||||
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
|
||||
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
|
||||
SYS_DRIVE = os.environ["SystemDrive"]
|
||||
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
|
||||
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
|
||||
|
||||
|
||||
def get_services():
|
||||
# see https://github.com/wh1te909/tacticalrmm/issues/38
|
||||
# for why I am manually implementing the svc.as_dict() method of psutil
|
||||
ret = []
|
||||
for svc in psutil.win_service_iter():
|
||||
i = {}
|
||||
try:
|
||||
i["display_name"] = svc.display_name()
|
||||
i["binpath"] = svc.binpath()
|
||||
i["username"] = svc.username()
|
||||
i["start_type"] = svc.start_type()
|
||||
i["status"] = svc.status()
|
||||
i["pid"] = svc.pid()
|
||||
i["name"] = svc.name()
|
||||
i["description"] = svc.description()
|
||||
except Exception:
|
||||
continue
|
||||
else:
|
||||
ret.append(i)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def run_python_script(filename, timeout, script_type="userdefined"):
|
||||
# no longer used in agent version 0.11.0
|
||||
file_path = os.path.join(TEMP_DIR, filename)
|
||||
|
||||
if os.path.exists(file_path):
|
||||
try:
|
||||
os.remove(file_path)
|
||||
except:
|
||||
pass
|
||||
|
||||
if script_type == "userdefined":
|
||||
__salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
|
||||
else:
|
||||
__salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)
|
||||
|
||||
return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
|
||||
|
||||
|
||||
def run_script(filepath, filename, shell, timeout, args=[], bg=False):
|
||||
if shell == "powershell" or shell == "cmd":
|
||||
if args:
|
||||
return __salt__["cmd.script"](
|
||||
source=filepath,
|
||||
args=" ".join(map(lambda x: f'"{x}"', args)),
|
||||
shell=shell,
|
||||
timeout=timeout,
|
||||
bg=bg,
|
||||
)
|
||||
else:
|
||||
return __salt__["cmd.script"](
|
||||
source=filepath, shell=shell, timeout=timeout, bg=bg
|
||||
)
|
||||
|
||||
elif shell == "python":
|
||||
file_path = os.path.join(TEMP_DIR, filename)
|
||||
|
||||
if os.path.exists(file_path):
|
||||
try:
|
||||
os.remove(file_path)
|
||||
except:
|
||||
pass
|
||||
|
||||
__salt__["cp.get_file"](filepath, file_path)
|
||||
|
||||
salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"
|
||||
|
||||
if args:
|
||||
a = " ".join(map(lambda x: f'"{x}"', args))
|
||||
cmd = f"{PY_BIN} {file_path} {a}"
|
||||
return __salt__[salt_cmd](cmd, timeout=timeout)
|
||||
else:
|
||||
return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)
|
||||
|
||||
|
||||
def uninstall_agent():
|
||||
remove_exe = os.path.join(PROGRAM_DIR, "unins000.exe")
|
||||
__salt__["cmd.run_bg"]([remove_exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
|
||||
return "ok"
|
||||
|
||||
|
||||
def update_salt():
|
||||
for p in psutil.process_iter():
|
||||
with p.oneshot():
|
||||
if p.name() == "tacticalrmm.exe" and "updatesalt" in p.cmdline():
|
||||
return "running"
|
||||
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
CREATE_NEW_PROCESS_GROUP = 0x00000200
|
||||
DETACHED_PROCESS = 0x00000008
|
||||
cmd = [TAC_RMM, "-m", "updatesalt"]
|
||||
p = Popen(
|
||||
cmd,
|
||||
stdin=PIPE,
|
||||
stdout=PIPE,
|
||||
stderr=PIPE,
|
||||
close_fds=True,
|
||||
creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
|
||||
)
|
||||
return p.pid
|
||||
|
||||
|
||||
def run_manual_checks():
|
||||
__salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
|
||||
return "ok"
|
||||
|
||||
|
||||
def install_updates():
|
||||
for p in psutil.process_iter():
|
||||
with p.oneshot():
|
||||
if p.name() == "tacticalrmm.exe" and "winupdater" in p.cmdline():
|
||||
return "running"
|
||||
|
||||
return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
|
||||
|
||||
|
||||
def _wait_for_service(svc, status, retries=10):
|
||||
attempts = 0
|
||||
while 1:
|
||||
try:
|
||||
service = psutil.win_service_get(svc)
|
||||
except psutil.NoSuchProcess:
|
||||
stat = "fail"
|
||||
attempts += 1
|
||||
sleep(5)
|
||||
else:
|
||||
stat = service.status()
|
||||
if stat != status:
|
||||
attempts += 1
|
||||
sleep(5)
|
||||
else:
|
||||
attempts = 0
|
||||
|
||||
if attempts == 0 or attempts > retries:
|
||||
break
|
||||
|
||||
return stat
|
||||
|
||||
|
||||
def agent_update_v2(inno, url):
|
||||
# make sure another instance of the update is not running
|
||||
# this function spawns 2 instances of itself (because we call it twice with salt run_bg)
|
||||
# so if more than 2 running, don't continue as an update is already running
|
||||
count = 0
|
||||
for p in psutil.process_iter():
|
||||
try:
|
||||
with p.oneshot():
|
||||
if "win_agent.agent_update_v2" in p.cmdline():
|
||||
count += 1
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
if count > 2:
|
||||
return "already running"
|
||||
|
||||
sleep(random.randint(1, 20)) # don't flood the rmm
|
||||
|
||||
exe = os.path.join(TEMP_DIR, inno)
|
||||
|
||||
if os.path.exists(exe):
|
||||
try:
|
||||
os.remove(exe)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
r = requests.get(url, stream=True, timeout=600)
|
||||
except Exception:
|
||||
return "failed"
|
||||
|
||||
if r.status_code != 200:
|
||||
return "failed"
|
||||
|
||||
with open(exe, "wb") as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk:
|
||||
f.write(chunk)
|
||||
del r
|
||||
|
||||
ret = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)
|
||||
|
||||
tac = _wait_for_service(svc="tacticalagent", status="running")
|
||||
if tac != "running":
|
||||
subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)
|
||||
|
||||
chk = _wait_for_service(svc="checkrunner", status="running")
|
||||
if chk != "running":
|
||||
subprocess.run([NSSM, "start", "checkrunner"], timeout=30)
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
def do_agent_update_v2(inno, url):
|
||||
return __salt__["cmd.run_bg"](
|
||||
[
|
||||
SALT_CALL,
|
||||
"win_agent.agent_update_v2",
|
||||
f"inno={inno}",
|
||||
f"url={url}",
|
||||
"--local",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def agent_update(version, url):
|
||||
# make sure another instance of the update is not running
|
||||
# this function spawns 2 instances of itself so if more than 2 running,
|
||||
# don't continue as an update is already running
|
||||
count = 0
|
||||
for p in psutil.process_iter():
|
||||
try:
|
||||
with p.oneshot():
|
||||
if "win_agent.agent_update" in p.cmdline():
|
||||
count += 1
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
if count > 2:
|
||||
return "already running"
|
||||
|
||||
sleep(random.randint(1, 60)) # don't flood the rmm
|
||||
try:
|
||||
r = requests.get(url, stream=True, timeout=600)
|
||||
except Exception:
|
||||
return "failed"
|
||||
|
||||
if r.status_code != 200:
|
||||
return "failed"
|
||||
|
||||
exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")
|
||||
|
||||
with open(exe, "wb") as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk:
|
||||
f.write(chunk)
|
||||
del r
|
||||
|
||||
services = ("tacticalagent", "checkrunner")
|
||||
|
||||
for svc in services:
|
||||
subprocess.run([NSSM, "stop", svc], timeout=120)
|
||||
|
||||
sleep(10)
|
||||
r = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
|
||||
sleep(30)
|
||||
|
||||
for svc in services:
|
||||
subprocess.run([NSSM, "start", svc], timeout=120)
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
def do_agent_update(version, url):
|
||||
return __salt__["cmd.run_bg"](
|
||||
[
|
||||
SALT_CALL,
|
||||
"win_agent.agent_update",
|
||||
f"version={version}",
|
||||
f"url={url}",
|
||||
"--local",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class SystemDetail:
|
||||
def __init__(self):
|
||||
self.c = wmi.WMI()
|
||||
self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
|
||||
self.comp_sys = self.c.Win32_ComputerSystem()
|
||||
self.memory = self.c.Win32_PhysicalMemory()
|
||||
self.os = self.c.Win32_OperatingSystem()
|
||||
self.base_board = self.c.Win32_BaseBoard()
|
||||
self.bios = self.c.Win32_BIOS()
|
||||
self.disk = self.c.Win32_DiskDrive()
|
||||
self.network_adapter = self.c.Win32_NetworkAdapter()
|
||||
self.network_config = self.c.Win32_NetworkAdapterConfiguration()
|
||||
self.desktop_monitor = self.c.Win32_DesktopMonitor()
|
||||
self.cpu = self.c.Win32_Processor()
|
||||
self.usb = self.c.Win32_USBController()
|
||||
|
||||
def get_all(self, obj):
|
||||
ret = []
|
||||
for i in obj:
|
||||
tmp = [
|
||||
{j: getattr(i, j)}
|
||||
for j in list(i.properties)
|
||||
if getattr(i, j) is not None
|
||||
]
|
||||
ret.append(tmp)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def system_info():
|
||||
info = SystemDetail()
|
||||
return {
|
||||
"comp_sys_prod": info.get_all(info.comp_sys_prod),
|
||||
"comp_sys": info.get_all(info.comp_sys),
|
||||
"mem": info.get_all(info.memory),
|
||||
"os": info.get_all(info.os),
|
||||
"base_board": info.get_all(info.base_board),
|
||||
"bios": info.get_all(info.bios),
|
||||
"disk": info.get_all(info.disk),
|
||||
"network_adapter": info.get_all(info.network_adapter),
|
||||
"network_config": info.get_all(info.network_config),
|
||||
"desktop_monitor": info.get_all(info.desktop_monitor),
|
||||
"cpu": info.get_all(info.cpu),
|
||||
"usb": info.get_all(info.usb),
|
||||
}
|
||||
|
||||
|
||||
def local_sys_info():
|
||||
return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
|
||||
|
||||
|
||||
def get_procs():
|
||||
ret = []
|
||||
|
||||
# setup
|
||||
for proc in psutil.process_iter():
|
||||
with proc.oneshot():
|
||||
proc.cpu_percent(interval=None)
|
||||
|
||||
# need time for psutil to record cpu percent
|
||||
sleep(1)
|
||||
|
||||
for c, proc in enumerate(psutil.process_iter(), 1):
|
||||
x = {}
|
||||
with proc.oneshot():
|
||||
if proc.pid == 0 or not proc.name():
|
||||
continue
|
||||
|
||||
x["name"] = proc.name()
|
||||
x["cpu_percent"] = proc.cpu_percent(interval=None) / psutil.cpu_count()
|
||||
x["memory_percent"] = proc.memory_percent()
|
||||
x["pid"] = proc.pid
|
||||
x["ppid"] = proc.ppid()
|
||||
x["status"] = proc.status()
|
||||
x["username"] = proc.username()
|
||||
x["id"] = c
|
||||
|
||||
ret.append(x)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def _compress_json(j):
|
||||
return {
|
||||
"wineventlog": base64.b64encode(
|
||||
zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
|
||||
).decode("ascii", errors="ignore")
|
||||
}
|
||||
|
||||
|
||||
def get_eventlog(logtype, last_n_days):
|
||||
|
||||
start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
|
||||
flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ
|
||||
|
||||
status_dict = {
|
||||
win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
|
||||
win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
|
||||
win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
|
||||
win32con.EVENTLOG_WARNING_TYPE: "WARNING",
|
||||
win32con.EVENTLOG_ERROR_TYPE: "ERROR",
|
||||
0: "INFO",
|
||||
}
|
||||
|
||||
computer = "localhost"
|
||||
hand = win32evtlog.OpenEventLog(computer, logtype)
|
||||
total = win32evtlog.GetNumberOfEventLogRecords(hand)
|
||||
log = []
|
||||
uid = 0
|
||||
done = False
|
||||
|
||||
try:
|
||||
while 1:
|
||||
events = win32evtlog.ReadEventLog(hand, flags, 0)
|
||||
for ev_obj in events:
|
||||
|
||||
uid += 1
|
||||
# return once the total number of events is reached or we'll be stuck in an infinite loop
|
||||
if uid >= total:
|
||||
done = True
|
||||
break
|
||||
|
||||
the_time = ev_obj.TimeGenerated.Format()
|
||||
time_obj = datetime.datetime.strptime(the_time, "%c")
|
||||
if time_obj < start_time:
|
||||
done = True
|
||||
break
|
||||
|
||||
computer = str(ev_obj.ComputerName)
|
||||
src = str(ev_obj.SourceName)
|
||||
evt_type = str(status_dict[ev_obj.EventType])
|
||||
evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
|
||||
evt_category = str(ev_obj.EventCategory)
|
||||
record = str(ev_obj.RecordNumber)
|
||||
msg = (
|
||||
str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
|
||||
.replace("<", "")
|
||||
.replace(">", "")
|
||||
)
|
||||
|
||||
event_dict = {
|
||||
"computer": computer,
|
||||
"source": src,
|
||||
"eventType": evt_type,
|
||||
"eventID": evt_id,
|
||||
"eventCategory": evt_category,
|
||||
"message": msg,
|
||||
"time": the_time,
|
||||
"record": record,
|
||||
"uid": uid,
|
||||
}
|
||||
|
||||
log.append(event_dict)
|
||||
|
||||
if done:
|
||||
break
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
win32evtlog.CloseEventLog(hand)
|
||||
return _compress_json(log)
|
||||
@@ -26,7 +26,7 @@ def get_wmi_data():
|
||||
agent = Recipe(
|
||||
Agent,
|
||||
hostname="DESKTOP-TEST123",
|
||||
version="1.1.1",
|
||||
version="1.3.0",
|
||||
monitoring_type=cycle(["workstation", "server"]),
|
||||
salt_id=generate_agent_id("DESKTOP-TEST123"),
|
||||
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import requests
|
||||
import time
|
||||
import base64
|
||||
from Crypto.Cipher import AES
|
||||
@@ -9,6 +8,7 @@ import validators
|
||||
import msgpack
|
||||
import re
|
||||
from collections import Counter
|
||||
from typing import List
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from distutils.version import LooseVersion
|
||||
@@ -117,14 +117,6 @@ class Agent(BaseAuditModel):
|
||||
return settings.DL_32
|
||||
return None
|
||||
|
||||
@property
|
||||
def winsalt_dl(self):
|
||||
if self.arch == "64":
|
||||
return settings.SALT_64
|
||||
elif self.arch == "32":
|
||||
return settings.SALT_32
|
||||
return None
|
||||
|
||||
@property
|
||||
def win_inno_exe(self):
|
||||
if self.arch == "64":
|
||||
@@ -382,6 +374,13 @@ class Agent(BaseAuditModel):
|
||||
|
||||
return patch_policy
|
||||
|
||||
def get_approved_update_guids(self) -> List[str]:
|
||||
return list(
|
||||
self.winupdates.filter(action="approve", installed=False).values_list(
|
||||
"guid", flat=True
|
||||
)
|
||||
)
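
A quick, self-contained illustration of what the new `get_approved_update_guids` helper selects; the data below is invented purely to show the rule (approved updates that are not yet installed), mirrored here on plain dicts instead of the ORM:

```python
# Hypothetical rows for illustration only; guid/action/installed are the fields
# the queryset filter above relies on.
updates = [
    {"guid": "abc-123", "action": "approve", "installed": False},
    {"guid": "def-456", "action": "approve", "installed": True},
    {"guid": "ghi-789", "action": "nothing", "installed": False},
]
approved = [u["guid"] for u in updates if u["action"] == "approve" and not u["installed"]]
print(approved)  # ['abc-123']
```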
|
||||
|
||||
def generate_checks_from_policies(self):
|
||||
from automation.models import Policy
|
||||
|
||||
@@ -452,77 +451,6 @@ class Agent(BaseAuditModel):
|
||||
await nc.flush()
|
||||
await nc.close()
|
||||
|
||||
def salt_api_cmd(self, **kwargs):
|
||||
|
||||
# salt should always timeout first before the requests' timeout
|
||||
try:
|
||||
timeout = kwargs["timeout"]
|
||||
except KeyError:
|
||||
# default timeout
|
||||
timeout = 15
|
||||
salt_timeout = 12
|
||||
else:
|
||||
if timeout < 8:
|
||||
timeout = 8
|
||||
salt_timeout = 5
|
||||
else:
|
||||
salt_timeout = timeout - 3
|
||||
|
||||
json = {
|
||||
"client": "local",
|
||||
"tgt": self.salt_id,
|
||||
"fun": kwargs["func"],
|
||||
"timeout": salt_timeout,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[json],
|
||||
timeout=timeout,
|
||||
)
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
try:
|
||||
ret = resp.json()["return"][0][self.salt_id]
|
||||
except Exception as e:
|
||||
logger.error(f"{self.salt_id}: {e}")
|
||||
return "error"
|
||||
else:
|
||||
return ret
|
||||
|
||||
def salt_api_async(self, **kwargs):
|
||||
|
||||
json = {
|
||||
"client": "local_async",
|
||||
"tgt": self.salt_id,
|
||||
"fun": kwargs["func"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
return resp
|
||||
|
||||
@staticmethod
|
||||
def serialize(agent):
|
||||
# serializes the agent and returns json
|
||||
@@ -533,32 +461,6 @@ class Agent(BaseAuditModel):
|
||||
del ret["client"]
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
def salt_batch_async(**kwargs):
|
||||
assert isinstance(kwargs["minions"], list)
|
||||
|
||||
json = {
|
||||
"client": "local_async",
|
||||
"tgt_type": "list",
|
||||
"tgt": kwargs["minions"],
|
||||
"fun": kwargs["func"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
return resp
|
||||
|
||||
def delete_superseded_updates(self):
|
||||
try:
|
||||
pks = [] # list of pks to delete
|
||||
|
||||
@@ -34,6 +34,12 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
]
|
||||
|
||||
|
||||
class AgentOverdueActionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = ["pk", "overdue_email_alert", "overdue_text_alert"]
|
||||
|
||||
|
||||
class AgentTableSerializer(serializers.ModelSerializer):
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
pending_actions = serializers.SerializerMethodField()
|
||||
@@ -54,7 +60,7 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
else:
|
||||
agent_tz = self.context["default_tz"]
|
||||
|
||||
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")
|
||||
return obj.last_seen.astimezone(agent_tz).timestamp()
|
||||
|
||||
def get_logged_username(self, obj) -> str:
|
||||
if obj.logged_in_username == "None" and obj.status == "online":
|
||||
|
||||
@@ -2,7 +2,6 @@ import asyncio
|
||||
from loguru import logger
|
||||
from time import sleep
|
||||
import random
|
||||
import requests
|
||||
from packaging import version as pyver
|
||||
from typing import List
|
||||
|
||||
@@ -20,9 +19,11 @@ logger.configure(**settings.LOG_CONFIG)
|
||||
def _check_agent_service(pk: int) -> None:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=2))
|
||||
# if the agent is responding to pong from the rpc service but is not showing as online (handled by tacticalagent service)
|
||||
# then tacticalagent service is hung. forcefully restart it
|
||||
if r == "pong":
|
||||
logger.info(
|
||||
f"Detected crashed tacticalagent service on {agent.hostname}, attempting recovery"
|
||||
f"Detected crashed tacticalagent service on {agent.hostname} v{agent.version}, attempting recovery"
|
||||
)
|
||||
data = {"func": "recover", "payload": {"mode": "tacagent"}}
|
||||
asyncio.run(agent.nats_cmd(data, wait=False))
|
||||
@@ -50,7 +51,7 @@ def check_in_task() -> None:
|
||||
|
||||
@app.task
|
||||
def monitor_agents_task() -> None:
|
||||
q = Agent.objects.all()
|
||||
q = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
|
||||
agents: List[int] = [i.pk for i in q if i.has_nats and i.status != "online"]
|
||||
for agent in agents:
|
||||
_check_agent_service(agent)
|
||||
@@ -63,9 +64,18 @@ def agent_update(pk: int) -> str:
|
||||
logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
|
||||
return "noarch"
|
||||
|
||||
version = settings.LATEST_AGENT_VER
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
# removed sqlite in 1.4.0 to get rid of cgo dependency
|
||||
# 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
|
||||
version = settings.LATEST_AGENT_VER
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
else:
|
||||
version = "1.3.0"
|
||||
inno = (
|
||||
"winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
|
||||
)
|
||||
url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"
|
||||
|
||||
if agent.has_nats:
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
|
||||
@@ -101,6 +111,10 @@ def agent_update(pk: int) -> str:
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
return "created"
|
||||
else:
|
||||
logger.warning(
|
||||
f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to update."
|
||||
)
|
||||
|
||||
return "not supported"
|
||||
|
||||
@@ -142,7 +156,7 @@ def auto_self_agent_update_task() -> None:
|
||||
|
||||
@app.task
|
||||
def get_wmi_task():
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
|
||||
online = [
|
||||
i
|
||||
for i in agents
|
||||
@@ -159,7 +173,7 @@ def get_wmi_task():
|
||||
|
||||
@app.task
|
||||
def sync_sysinfo_task():
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
|
||||
online = [
|
||||
i
|
||||
for i in agents
|
||||
@@ -177,94 +191,6 @@ def sync_sysinfo_task():
|
||||
sleep(rand)
|
||||
|
||||
|
||||
@app.task
|
||||
def sync_salt_modules_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
|
||||
# successful sync if new/changed files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
|
||||
# successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
|
||||
if r == "timeout" or r == "error":
|
||||
return f"Unable to sync modules {agent.salt_id}"
|
||||
|
||||
return f"Successfully synced salt modules on {agent.hostname}"
|
||||
|
||||
|
||||
@app.task
|
||||
def batch_sync_modules_task():
|
||||
# sync modules, split into chunks of 50 agents to not overload salt
|
||||
agents = Agent.objects.all()
|
||||
online = [i.salt_id for i in agents]
|
||||
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
|
||||
for chunk in chunks:
|
||||
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def uninstall_agent_task(salt_id, has_nats):
|
||||
attempts = 0
|
||||
error = False
|
||||
|
||||
if not has_nats:
|
||||
while 1:
|
||||
try:
|
||||
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "local",
|
||||
"tgt": salt_id,
|
||||
"fun": "win_agent.uninstall_agent",
|
||||
"timeout": 8,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=10,
|
||||
)
|
||||
ret = r.json()["return"][0][salt_id]
|
||||
except Exception:
|
||||
attempts += 1
|
||||
else:
|
||||
if ret != "ok":
|
||||
attempts += 1
|
||||
else:
|
||||
attempts = 0
|
||||
|
||||
if attempts >= 10:
|
||||
error = True
|
||||
break
|
||||
elif attempts == 0:
|
||||
break
|
||||
|
||||
if error:
|
||||
logger.error(f"{salt_id} uninstall failed")
|
||||
else:
|
||||
logger.info(f"{salt_id} was successfully uninstalled")
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "wheel",
|
||||
"fun": "key.delete",
|
||||
"match": salt_id,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
logger.error(f"{salt_id} unable to remove salt-key")
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_outage_email_task(pk):
|
||||
sleep(random.randint(1, 15))
|
||||
@@ -328,13 +254,6 @@ def agent_outages_task():
|
||||
agent_outage_sms_task.delay(pk=outage.pk)
|
||||
|
||||
|
||||
@app.task
|
||||
def install_salt_task(pk: int) -> None:
|
||||
sleep(20)
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
asyncio.run(agent.nats_cmd({"func": "installsalt"}, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_agent_recovery_task(pk: int) -> None:
|
||||
sleep(10)
|
||||
@@ -396,3 +315,18 @@ def run_script_email_results_task(
|
||||
server.quit()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
|
||||
@app.task
|
||||
def remove_salt_task() -> None:
|
||||
if hasattr(settings, "KEEP_SALT") and settings.KEEP_SALT:
|
||||
return
|
||||
|
||||
q = Agent.objects.only("pk", "version")
|
||||
agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
|
||||
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
|
||||
for chunk in chunks:
|
||||
for agent in chunk:
|
||||
asyncio.run(agent.nats_cmd({"func": "removesalt"}, wait=False))
|
||||
sleep(0.1)
|
||||
sleep(4)
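
For reference, the batching used here (and in `batch_sync_modules_task` above) reduces to this standalone snippet; the item count is an arbitrary example:

```python
# 120 items split into batches of at most 50.
agents = list(range(120))
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
print([len(c) for c in chunks])  # [50, 50, 20]
```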
|
||||
|
||||
@@ -14,12 +14,6 @@ from tacticalrmm.test import TacticalTestCase
|
||||
from .serializers import AgentSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from .models import Agent
|
||||
from .tasks import (
|
||||
agent_recovery_sms_task,
|
||||
auto_self_agent_update_task,
|
||||
sync_salt_modules_task,
|
||||
batch_sync_modules_task,
|
||||
)
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
|
||||
|
||||
@@ -110,9 +104,8 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
||||
@patch("agents.views.reload_nats")
|
||||
def test_uninstall(self, reload_nats, mock_task, nats_cmd):
|
||||
def test_uninstall(self, reload_nats, nats_cmd):
|
||||
url = "/agents/uninstall/"
|
||||
data = {"pk": self.agent.pk}
|
||||
|
||||
@@ -121,7 +114,6 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
|
||||
reload_nats.assert_called_once()
|
||||
mock_task.assert_called_with(self.agent.salt_id, True)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@@ -335,7 +327,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data["mode"] = "salt"
|
||||
data["mode"] = "mesh"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertIn("pending", r.json())
|
||||
@@ -355,7 +347,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.agent.version = "0.9.4"
|
||||
self.agent.save(update_fields=["version"])
|
||||
data["mode"] = "salt"
|
||||
data["mode"] = "mesh"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertIn("0.9.5", r.json())
|
||||
@@ -487,42 +479,20 @@ class TestAgentViews(TacticalTestCase):
|
||||
def test_overdue_action(self):
|
||||
url = "/agents/overdueaction/"
|
||||
|
||||
payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"}
|
||||
payload = {"pk": self.agent.pk, "overdue_email_alert": True}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertTrue(agent.overdue_email_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "email", "action": "disabled"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertFalse(agent.overdue_email_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "text", "action": "enabled"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertTrue(agent.overdue_text_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "text", "action": "disabled"})
|
||||
payload = {"pk": self.agent.pk, "overdue_text_alert": False}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertFalse(agent.overdue_text_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "email", "action": "523423"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
payload.update({"alertType": "text", "action": "asdasd3434asdasd"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_list_agents_no_detail(self):
|
||||
@@ -543,7 +513,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||
""" @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_script_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_command_task.delay")
|
||||
@patch("agents.models.Agent.salt_batch_async")
|
||||
@@ -585,7 +555,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
""" payload = {
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "workstations",
|
||||
"target": "client",
|
||||
@@ -599,7 +569,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300) """
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
@@ -657,7 +627,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
# TODO mock the script
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
self.check_not_authenticated("post", url) """
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_recover_mesh(self, nats_cmd):
|
||||
@@ -759,41 +729,6 @@ class TestAgentTasks(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_sync_salt_modules_task(self, salt_api_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]}
|
||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
||||
salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules")
|
||||
self.assertEqual(
|
||||
ret.result, f"Successfully synced salt modules on {self.agent.hostname}"
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
|
||||
|
||||
@patch("agents.models.Agent.salt_batch_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
|
||||
# chunks of 50, should run 4 times
|
||||
baker.make_recipe(
|
||||
"agents.online_agent", last_seen=djangotime.now(), _quantity=60
|
||||
)
|
||||
baker.make_recipe(
|
||||
"agents.overdue_agent",
|
||||
last_seen=djangotime.now() - djangotime.timedelta(minutes=9),
|
||||
_quantity=115,
|
||||
)
|
||||
ret = batch_sync_modules_task.s().apply()
|
||||
self.assertEqual(salt_batch_async.call_count, 4)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_agent_update(self, nats_cmd):
|
||||
from agents.tasks import agent_update
|
||||
@@ -823,19 +758,20 @@ class TestAgentTasks(TacticalTestCase):
|
||||
action = PendingAction.objects.get(agent__pk=agent64_111.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
self.assertEqual(action.details["url"], settings.DL_64)
|
||||
self.assertEqual(
|
||||
action.details["inno"], f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||
action.details["url"],
|
||||
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
)
|
||||
self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)
|
||||
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
|
||||
self.assertEqual(action.details["version"], "1.3.0")
|
||||
|
||||
agent64 = baker.make_recipe(
|
||||
agent_64_130 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.1.12",
|
||||
version="1.3.0",
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
r = agent_update(agent64.pk)
|
||||
r = agent_update(agent_64_130.pk)
|
||||
self.assertEqual(r, "created")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
@@ -849,6 +785,26 @@ class TestAgentTasks(TacticalTestCase):
|
||||
wait=False,
|
||||
)
|
||||
|
||||
agent64_old = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.2.1",
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
r = agent_update(agent64_old.pk)
|
||||
self.assertEqual(r, "created")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
"version": "1.3.0",
|
||||
"inno": "winagent-v1.3.0.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
|
||||
""" @patch("agents.models.Agent.salt_api_async")
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
||||
|
||||
@@ -7,6 +7,7 @@ import random
|
||||
import string
|
||||
import datetime as dt
|
||||
from packaging import version as pyver
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -29,15 +30,15 @@ from .serializers import (
|
||||
AgentEditSerializer,
|
||||
NoteSerializer,
|
||||
NotesSerializer,
|
||||
AgentOverdueActionSerializer,
|
||||
)
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .tasks import (
|
||||
uninstall_agent_task,
|
||||
send_agent_update_task,
|
||||
run_script_email_results_task,
|
||||
)
|
||||
from winupdate.tasks import bulk_check_for_updates_task
|
||||
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
|
||||
from tacticalrmm.utils import notify_error, reload_nats
|
||||
@@ -72,10 +73,6 @@ def ping(request, pk):
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
else:
|
||||
r = agent.salt_api_cmd(timeout=5, func="test.ping")
|
||||
if isinstance(r, bool) and r:
|
||||
status = "online"
|
||||
|
||||
return Response({"name": agent.hostname, "status": status})
|
||||
|
||||
@@ -86,13 +83,9 @@ def uninstall(request):
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
|
||||
salt_id = agent.salt_id
|
||||
name = agent.hostname
|
||||
has_nats = agent.has_nats
|
||||
agent.delete()
|
||||
reload_nats()
|
||||
|
||||
uninstall_agent_task.delay(salt_id, has_nats)
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
|
||||
|
||||
@@ -341,26 +334,12 @@ def by_site(request, sitepk):
|
||||
|
||||
@api_view(["POST"])
|
||||
def overdue_action(request):
|
||||
pk = request.data["pk"]
|
||||
alert_type = request.data["alertType"]
|
||||
action = request.data["action"]
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if alert_type == "email" and action == "enabled":
|
||||
agent.overdue_email_alert = True
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "email" and action == "disabled":
|
||||
agent.overdue_email_alert = False
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "text" and action == "enabled":
|
||||
agent.overdue_text_alert = True
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
elif alert_type == "text" and action == "disabled":
|
||||
agent.overdue_text_alert = False
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
else:
|
||||
return Response(
|
||||
{"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
serializer = AgentOverdueActionSerializer(
|
||||
instance=agent, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response(agent.hostname)
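
A rough sketch of calling the reworked endpoint, which now takes the serializer fields directly instead of `alertType`/`action`. Only the path, payload shape, and response body come from this diff and its tests; the host and auth header are placeholders:

```python
# Placeholder host and token, for illustration only.
import requests

resp = requests.post(
    "https://rmm.example.com/agents/overdueaction/",
    json={"pk": 42, "overdue_email_alert": True},  # any subset of overdue_email_alert / overdue_text_alert
    headers={"Authorization": "Token <api token>"},
)
print(resp.json())  # the agent's hostname on success
```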
|
||||
|
||||
|
||||
@@ -481,7 +460,7 @@ def install_agent(request):
|
||||
f"GOARCH={goarch}",
|
||||
go_bin,
|
||||
"build",
|
||||
f"-ldflags=\"-X 'main.Inno={inno}'",
|
||||
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
|
||||
f"-X 'main.Api={api}'",
|
||||
f"-X 'main.Client={client_id}'",
|
||||
f"-X 'main.Site={site_id}'",
|
||||
@@ -611,8 +590,6 @@ def install_agent(request):
|
||||
resp = {
|
||||
"cmd": " ".join(str(i) for i in cmd),
|
||||
"url": download_url,
|
||||
"salt64": settings.SALT_64,
|
||||
"salt32": settings.SALT_32,
|
||||
}
|
||||
|
||||
return Response(resp)
|
||||
@@ -673,17 +650,12 @@ def recover(request):
|
||||
return notify_error("Only available in agent version greater than 0.9.5")
|
||||
|
||||
if not agent.has_nats:
|
||||
if mode == "tacagent" or mode == "checkrunner" or mode == "rpc":
|
||||
if mode == "tacagent" or mode == "rpc":
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
|
||||
if agent.has_nats:
|
||||
if (
|
||||
mode == "tacagent"
|
||||
or mode == "checkrunner"
|
||||
or mode == "salt"
|
||||
or mode == "mesh"
|
||||
):
|
||||
if mode == "tacagent" or mode == "mesh":
|
||||
data = {"func": "recover", "payload": {"mode": mode}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
||||
if r == "ok":
|
||||
@@ -840,7 +812,7 @@ def bulk(request):
|
||||
elif request.data["target"] == "agents":
|
||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
elif request.data["target"] == "all":
|
||||
q = Agent.objects.all()
|
||||
q = Agent.objects.only("pk", "monitoring_type")
|
||||
else:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
@@ -849,8 +821,7 @@ def bulk(request):
|
||||
elif request.data["monType"] == "workstations":
|
||||
q = q.filter(monitoring_type="workstation")
|
||||
|
||||
minions = [agent.salt_id for agent in q]
|
||||
agents = [agent.pk for agent in q]
|
||||
agents: List[int] = [agent.pk for agent in q]
|
||||
|
||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||
|
||||
@@ -868,14 +839,12 @@ def bulk(request):
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "install":
|
||||
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["mode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(minions=minions)
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class Apiv2Config(AppConfig):
|
||||
name = "apiv2"
|
||||
@@ -1,38 +0,0 @@
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from unittest.mock import patch
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
|
||||
class TestAPIv2(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_sync_modules(self, mock_ret):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
url = "/api/v2/saltminion/"
|
||||
payload = {"agent_id": agent.agent_id}
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = []
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Modules are already in sync")
|
||||
|
||||
mock_ret.return_value = ["modules.win_agent"]
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
||||
|
||||
mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
@@ -1,14 +0,0 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
from apiv3 import views as v3_views
|
||||
|
||||
urlpatterns = [
|
||||
path("newagent/", v3_views.NewAgent.as_view()),
|
||||
path("meshexe/", v3_views.MeshExe.as_view()),
|
||||
path("saltminion/", v3_views.SaltMinion.as_view()),
|
||||
path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
|
||||
path("sysinfo/", v3_views.SysInfo.as_view()),
|
||||
path("hello/", v3_views.Hello.as_view()),
|
||||
path("checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||
]
|
||||
@@ -1,41 +0,0 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
|
||||
from checks.serializers import CheckRunnerGetSerializerV2
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
"""
|
||||
For the windows python agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializerV2(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request):
|
||||
check = get_object_or_404(Check, pk=request.data["id"])
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
status = check.handle_checkv2(request.data)
|
||||
return Response(status)
|
||||
@@ -26,23 +26,6 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_salt_minion(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/saltminion/"
|
||||
url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn("latestVer", r.json().keys())
|
||||
self.assertIn("currentVer", r.json().keys())
|
||||
self.assertIn("salt_id", r.json().keys())
|
||||
self.assertIn("downloadURL", r.json().keys())
|
||||
|
||||
r2 = self.client.get(url2)
|
||||
self.assertEqual(r2.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url2)
|
||||
|
||||
def test_get_mesh_info(self):
|
||||
url = f"/api/v3/{self.agent.pk}/meshinfo/"
|
||||
|
||||
@@ -93,11 +76,11 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("agents.tasks.install_salt_task.delay")
|
||||
def test_install_salt(self, mock_task):
|
||||
url = f"/api/v3/{self.agent.agent_id}/installsalt/"
|
||||
def test_checkrunner_interval(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
mock_task.assert_called_with(self.agent.pk)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.assertEqual(
|
||||
r.json(),
|
||||
{"agent": self.agent.pk, "check_interval": self.agent.check_interval},
|
||||
)
|
||||
|
||||
@@ -6,9 +6,8 @@ urlpatterns = [
|
||||
path("hello/", views.Hello.as_view()),
|
||||
path("checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
|
||||
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
|
||||
path("saltminion/", views.SaltMinion.as_view()),
|
||||
path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
|
||||
path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
|
||||
path("meshexe/", views.MeshExe.as_view()),
|
||||
path("sysinfo/", views.SysInfo.as_view()),
|
||||
@@ -17,5 +16,4 @@ urlpatterns = [
|
||||
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
|
||||
path("software/", views.Software.as_view()),
|
||||
path("installer/", views.Installer.as_view()),
|
||||
path("<str:agentid>/installsalt/", views.InstallSalt.as_view()),
|
||||
]
|
||||
|
||||
@@ -21,7 +21,7 @@ from autotasks.models import AutomatedTask
|
||||
from accounts.models import User
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from software.models import InstalledSoftware
|
||||
from checks.serializers import CheckRunnerGetSerializerV3
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from winupdate.serializers import ApprovedUpdateSerializer
|
||||
@@ -29,11 +29,7 @@ from winupdate.serializers import ApprovedUpdateSerializer
|
||||
from agents.tasks import (
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
sync_salt_modules_task,
|
||||
install_salt_task,
|
||||
)
|
||||
from winupdate.tasks import check_for_updates_task
|
||||
from software.tasks import install_chocolatey
|
||||
from checks.utils import bytes2human
|
||||
from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList
|
||||
|
||||
@@ -132,15 +128,6 @@ class CheckIn(APIView):
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
|
||||
sync_salt_modules_task.delay(agent.pk)
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
||||
)
|
||||
|
||||
if not agent.choco_installed:
|
||||
install_chocolatey.delay(agent.pk, wait=True)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -227,15 +214,6 @@ class Hello(APIView):
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
|
||||
sync_salt_modules_task.delay(agent.pk)
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
||||
)
|
||||
|
||||
if not agent.choco_installed:
|
||||
install_chocolatey.delay(agent.pk, wait=True)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -254,7 +232,7 @@ class CheckRunner(APIView):
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializerV3(checks, many=True).data,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
@@ -267,6 +245,15 @@ class CheckRunner(APIView):
|
||||
return Response(status)
|
||||
|
||||
|
||||
class CheckRunnerInterval(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
return Response({"agent": agent.pk, "check_interval": agent.check_interval})
|
||||
|
||||
|
||||
class TaskRunner(APIView):
|
||||
"""
|
||||
For the windows golang agent
|
||||
@@ -305,77 +292,6 @@ class TaskRunner(APIView):
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SaltMinion(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
ret = {
|
||||
"latestVer": settings.LATEST_SALT_VER,
|
||||
"currentVer": agent.salt_ver,
|
||||
"salt_id": agent.salt_id,
|
||||
"downloadURL": agent.winsalt_dl,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def post(self, request):
|
||||
# accept the salt key
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if agent.salt_id != request.data["saltid"]:
|
||||
return notify_error("Salt keys do not match")
|
||||
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "wheel",
|
||||
"fun": "key.accept",
|
||||
"match": request.data["saltid"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
return notify_error("No communication between agent and salt-api")
|
||||
|
||||
try:
|
||||
data = resp.json()["return"][0]["data"]
|
||||
minion = data["return"]["minions"][0]
|
||||
except Exception:
|
||||
return notify_error("Key error")
|
||||
|
||||
if data["success"] and minion == request.data["saltid"]:
|
||||
return Response("Salt key was accepted")
|
||||
else:
|
||||
return notify_error("Not accepted")
|
||||
|
||||
def patch(self, request):
|
||||
# sync modules
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
return notify_error("Failed to sync salt modules")
|
||||
|
||||
if isinstance(r, list) and any("modules" in i for i in r):
|
||||
return Response("Successfully synced salt modules")
|
||||
elif isinstance(r, list) and not r:
|
||||
return Response("Modules are already in sync")
|
||||
else:
|
||||
return notify_error(f"Failed to sync salt modules: {str(r)}")
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.salt_ver = request.data["ver"]
|
||||
agent.save(update_fields=["salt_ver"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WinUpdater(APIView):
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
@@ -416,6 +332,7 @@ class WinUpdater(APIView):
|
||||
update.installed = True
|
||||
update.save(update_fields=["result", "downloaded", "installed"])
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
# agent calls this after it's finished installing all patches
|
||||
@@ -437,19 +354,11 @@ class WinUpdater(APIView):
|
||||
if reboot:
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
else:
|
||||
agent.salt_api_async(
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
logger.info(
|
||||
f"{agent.hostname} is rebooting after updates were installed."
|
||||
)
|
||||
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
else:
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -615,13 +524,3 @@ class Installer(APIView):
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class InstallSalt(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
install_salt_task.delay(agent.pk)
|
||||
return Response("ok")
|
||||
|
||||
@@ -11,11 +11,15 @@ def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server")
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation")
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
else:
|
||||
agents = policy.related_agents()
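These hunks swap bare Agent.objects.all() / .filter() calls for querysets that fetch only the columns the task actually touches. A rough sketch of the pattern with hypothetical models, just to show what .only() and prefetch_related() buy here:

# Sketch only: the fields are illustrative, not the real Agent schema.
from django.db import models

class Policy(models.Model):
    name = models.CharField(max_length=255)

class Agent(models.Model):
    monitoring_type = models.CharField(max_length=30)
    policy = models.ForeignKey(Policy, null=True, blank=True, on_delete=models.SET_NULL)

def default_policy_agents():
    # only() defers every column except pk/monitoring_type, keeping each row fetch small;
    # prefetch_related("policy") batches the related lookups into one extra query instead of one per agent.
    return Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")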
|
||||
|
||||
@@ -84,11 +88,15 @@ def generate_agent_tasks_from_policies_task(policypk):
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server")
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation")
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
else:
|
||||
agents = policy.related_agents()
|
||||
|
||||
|
||||
@@ -413,11 +413,15 @@ class UpdatePatchPolicy(APIView):
|
||||
|
||||
agents = None
|
||||
if "client" in request.data:
|
||||
agents = Agent.objects.filter(site__client_id=request.data["client"])
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site__client_id=request.data["client"]
|
||||
)
|
||||
elif "site" in request.data:
|
||||
agents = Agent.objects.filter(site_id=request.data["site"])
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site_id=request.data["site"]
|
||||
)
|
||||
else:
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")
|
||||
|
||||
for agent in agents:
|
||||
winupdatepolicy = agent.winupdatepolicy.get()
|
||||
|
||||
@@ -7,7 +7,7 @@ class Command(BaseCommand):
|
||||
help = "Checks for orphaned tasks on all agents and removes them"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "last_seen", "overdue_time")
|
||||
online = [i for i in agents if i.status == "online"]
|
||||
for agent in online:
|
||||
remove_orphaned_win_tasks.delay(agent.pk)
|
||||
|
||||
@@ -445,42 +445,6 @@ class Check(BaseAuditModel):
|
||||
|
||||
return self.status
|
||||
|
||||
def handle_check(self, data):
|
||||
if self.check_type != "cpuload" and self.check_type != "memory":
|
||||
|
||||
if data["status"] == "passing" and self.fail_count != 0:
|
||||
self.fail_count = 0
|
||||
self.save(update_fields=["fail_count"])
|
||||
|
||||
elif data["status"] == "failing":
|
||||
self.fail_count += 1
|
||||
self.save(update_fields=["fail_count"])
|
||||
|
||||
else:
|
||||
self.history.append(data["percent"])
|
||||
|
||||
if len(self.history) > 15:
|
||||
self.history = self.history[-15:]
|
||||
|
||||
self.save(update_fields=["history"])
|
||||
|
||||
avg = int(mean(self.history))
|
||||
|
||||
if avg > self.threshold:
|
||||
self.status = "failing"
|
||||
self.fail_count += 1
|
||||
self.save(update_fields=["status", "fail_count"])
|
||||
else:
|
||||
self.status = "passing"
|
||||
if self.fail_count != 0:
|
||||
self.fail_count = 0
|
||||
self.save(update_fields=["status", "fail_count"])
|
||||
else:
|
||||
self.save(update_fields=["status"])
|
||||
|
||||
if self.email_alert and self.fail_count >= self.fails_b4_alert:
|
||||
handle_check_email_alert_task.delay(self.pk)
|
||||
|
||||
@staticmethod
|
||||
def serialize(check):
|
||||
# serializes the check and returns json
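The handle_check method removed above kept a rolling window of the last 15 cpuload/memory samples and marked the check failing when their mean crossed the threshold. The same idea as a small standalone sketch (names and values are illustrative):

# Standalone sketch of the rolling-average logic the removed handle_check implemented.
from statistics import mean

def evaluate_usage_check(history: list, sample: int, threshold: int, window: int = 15) -> str:
    history.append(sample)
    del history[:-window]          # keep only the most recent `window` samples
    return "failing" if mean(history) > threshold else "passing"

readings = []
for pct in (90, 92, 96, 99):
    status = evaluate_usage_check(readings, pct, threshold=85)
print(status, readings)            # 'failing': the recent average is above 85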
|
||||
|
||||
@@ -95,101 +95,7 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class CheckRunnerGetSerializer(serializers.ModelSerializer):
|
||||
# for the windows agent
|
||||
# only send data needed for agent to run a check
|
||||
|
||||
assigned_task = serializers.SerializerMethodField()
|
||||
script = ScriptSerializer(read_only=True)
|
||||
|
||||
def get_assigned_task(self, obj):
|
||||
if obj.assignedtask.exists():
|
||||
# this will not break agents on version 0.10.2 or lower
|
||||
# newer agents once released will properly handle multiple tasks assigned to a check
|
||||
task = obj.assignedtask.first()
|
||||
return AssignedTaskCheckRunnerField(task).data
|
||||
|
||||
class Meta:
|
||||
model = Check
|
||||
exclude = [
|
||||
"policy",
|
||||
"managed_by_policy",
|
||||
"overriden_by_policy",
|
||||
"parent_check",
|
||||
"name",
|
||||
"more_info",
|
||||
"last_run",
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"fails_b4_alert",
|
||||
"fail_count",
|
||||
"email_sent",
|
||||
"text_sent",
|
||||
"outage_history",
|
||||
"extra_details",
|
||||
"stdout",
|
||||
"stderr",
|
||||
"retcode",
|
||||
"execution_time",
|
||||
"svc_display_name",
|
||||
"svc_policy_mode",
|
||||
"created_by",
|
||||
"created_time",
|
||||
"modified_by",
|
||||
"modified_time",
|
||||
"history",
|
||||
]
|
||||
|
||||
|
||||
class CheckRunnerGetSerializerV2(serializers.ModelSerializer):
|
||||
# for the windows __python__ agent
|
||||
# only send data needed for agent to run a check
|
||||
|
||||
assigned_tasks = serializers.SerializerMethodField()
|
||||
script = ScriptSerializer(read_only=True)
|
||||
|
||||
def get_assigned_tasks(self, obj):
|
||||
if obj.assignedtask.exists():
|
||||
tasks = obj.assignedtask.all()
|
||||
return AssignedTaskCheckRunnerField(tasks, many=True).data
|
||||
|
||||
class Meta:
|
||||
model = Check
|
||||
exclude = [
|
||||
"policy",
|
||||
"managed_by_policy",
|
||||
"overriden_by_policy",
|
||||
"parent_check",
|
||||
"name",
|
||||
"more_info",
|
||||
"last_run",
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"fails_b4_alert",
|
||||
"fail_count",
|
||||
"email_sent",
|
||||
"text_sent",
|
||||
"outage_history",
|
||||
"extra_details",
|
||||
"stdout",
|
||||
"stderr",
|
||||
"retcode",
|
||||
"execution_time",
|
||||
"svc_display_name",
|
||||
"svc_policy_mode",
|
||||
"created_by",
|
||||
"created_time",
|
||||
"modified_by",
|
||||
"modified_time",
|
||||
"history",
|
||||
]
|
||||
|
||||
|
||||
class CheckRunnerGetSerializerV3(serializers.ModelSerializer):
|
||||
# for the windows __golang__ agent
|
||||
# only send data needed for agent to run a check
|
||||
# the difference here is in the script serializer
|
||||
# script checks no longer rely on salt and are executed directly by the go agent
|
||||
|
||||
assigned_tasks = serializers.SerializerMethodField()
|
||||
script = ScriptCheckSerializer(read_only=True)
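The CheckRunnerGet serializers all follow the same basic shape: a SerializerMethodField for the assigned task(s) plus a long Meta.exclude list so the agent only receives the fields it needs to run the check. A stripped-down sketch of that pattern, with a much shorter exclude list than the real ones:

# Sketch of the "send only what the runner needs" serializer pattern.
from rest_framework import serializers
from checks.models import Check

class TaskField(serializers.Serializer):
    id = serializers.IntegerField()
    enabled = serializers.BooleanField()

class RunnerCheckSerializer(serializers.ModelSerializer):
    assigned_tasks = serializers.SerializerMethodField()

    def get_assigned_tasks(self, obj):
        # mirror the real serializers: only include tasks when some are attached
        if obj.assignedtask.exists():
            return TaskField(obj.assignedtask.all(), many=True).data

    class Meta:
        model = Check
        exclude = ["email_alert", "text_alert", "history"]  # trimmed example list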
|
||||
|
||||
|
||||
@@ -2,9 +2,9 @@ from checks.models import CheckHistory
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from .serializers import CheckSerializer
|
||||
from django.utils import timezone as djangotime
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
|
||||
class TestCheckViews(TacticalTestCase):
|
||||
@@ -184,6 +184,48 @@ class TestCheckViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("patch", url_a)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_checks(self, nats_cmd):
|
||||
agent = baker.make_recipe("agents.agent", version="1.4.1")
|
||||
agent_old = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0")
|
||||
|
||||
url = f"/checks/runchecks/{agent_old.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater")
|
||||
|
||||
url = f"/checks/runchecks/{agent_b4_141.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, wait=False)
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "busy"
|
||||
url = f"/checks/runchecks/{agent.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
|
||||
self.assertEqual(r.json(), f"Checks are already running on {agent.hostname}")
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "ok"
|
||||
url = f"/checks/runchecks/{agent.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
|
||||
self.assertEqual(r.json(), f"Checks will now be re-run on {agent.hostname}")
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "timeout"
|
||||
url = f"/checks/runchecks/{agent.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
|
||||
self.assertEqual(r.json(), "Unable to contact the agent")
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_check_history(self):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import asyncio
|
||||
from packaging import version as pyver
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.db.models import Q
|
||||
@@ -168,8 +169,17 @@ def run_checks(request, pk):
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
|
||||
return Response(agent.hostname)
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.4.1"):
|
||||
r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15))
|
||||
if r == "busy":
|
||||
return notify_error(f"Checks are already running on {agent.hostname}")
|
||||
elif r == "ok":
|
||||
return Response(f"Checks will now be re-run on {agent.hostname}")
|
||||
else:
|
||||
return notify_error("Unable to contact the agent")
|
||||
else:
|
||||
asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
|
||||
return Response(f"Checks will now be re-run on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
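The reworked run_checks view above branches on the agent version: 1.4.1+ agents answer the runchecks NATS command ("ok", "busy", or a timeout), while older agents only support fire-and-forget. A self-contained sketch of that dispatch shape, with a stub standing in for the real nats_cmd round trip:

# Sketch of the version-gated dispatch; nats_cmd here is a stub, not the real agent call.
import asyncio
from packaging import version as pyver

async def nats_cmd(payload: dict, timeout: int = 0, wait: bool = True) -> str:
    return "ok"  # stand-in for the round trip to the agent

def run_checks(agent_version: str) -> str:
    if pyver.parse(agent_version) >= pyver.parse("1.4.1"):
        # newer agents acknowledge, so wait (bounded) for the reply
        r = asyncio.run(nats_cmd({"func": "runchecks"}, timeout=15))
        return {"busy": "already running", "ok": "re-running checks"}.get(r, "unable to contact agent")
    # older agents: publish and return immediately
    asyncio.run(nats_cmd({"func": "runchecks"}, wait=False))
    return "re-running checks"

print(run_checks("1.4.1"), "|", run_checks("1.3.0"))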
|
||||
|
||||
@@ -223,7 +223,7 @@ class GenerateAgent(APIView):
|
||||
f"GOARCH={goarch}",
|
||||
go_bin,
|
||||
"build",
|
||||
f"-ldflags=\"-X 'main.Inno={inno}'",
|
||||
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
|
||||
f"-X 'main.Api={api}'",
|
||||
f"-X 'main.Client={d.client.pk}'",
|
||||
f"-X 'main.Site={d.site.pk}'",
|
||||
|
||||
@@ -57,7 +57,6 @@ func main() {
|
||||
|
||||
debugLog := flag.String("log", "", "Verbose output")
|
||||
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
|
||||
noSalt := flag.Bool("nosalt", false, "Does not install salt")
|
||||
silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
|
||||
cert := flag.String("cert", "", "Path to ca.pem")
|
||||
timeout := flag.String("timeout", "", "Timeout for subprocess calls")
|
||||
@@ -86,10 +85,6 @@ func main() {
|
||||
cmdArgs = append(cmdArgs, "-silent")
|
||||
}
|
||||
|
||||
if *noSalt {
|
||||
cmdArgs = append(cmdArgs, "-nosalt")
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*localMesh)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ class Command(BaseCommand):
|
||||
# 10-16-2020 changed the type of the agent's 'disks' model field
|
||||
# from a dict of dicts, to a list of disks in the golang agent
|
||||
# the following will convert dicts to lists for agents still on the python agent
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "disks")
|
||||
for agent in agents:
|
||||
if agent.disks is not None and isinstance(agent.disks, dict):
|
||||
new = []
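The loop body is cut off by the hunk, but the migration it describes is simple: the python agent stored disks as a dict of dicts, and the golang agent expects a flat list, so existing rows get their values flattened. A sketch of that conversion on plain data (the drive-letter keys are assumed for illustration):

# Plain-data sketch of the dict-of-dicts -> list conversion described in the comment above.
old_style = {
    "C:": {"device": "C:", "free": "12.3 GB", "used": "100.1 GB"},
    "D:": {"device": "D:", "free": "500.0 GB", "used": "12.0 GB"},
}

new_style = [disk for disk in old_style.values()]   # the go agent reports a list of disk dicts
print(new_style)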
|
||||
|
||||
@@ -83,8 +83,9 @@ class TestCoreTasks(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("tacticalrmm.utils.reload_nats")
|
||||
@patch("autotasks.tasks.remove_orphaned_win_tasks.delay")
|
||||
def test_ui_maintenance_actions(self, remove_orphaned_win_tasks):
|
||||
def test_ui_maintenance_actions(self, remove_orphaned_win_tasks, reload_nats):
|
||||
url = "/core/servermaintenance/"
|
||||
|
||||
agents = baker.make_recipe("agents.online_agent", _quantity=3)
|
||||
@@ -103,6 +104,7 @@ class TestCoreTasks(TacticalTestCase):
|
||||
data = {"action": "reload_nats"}
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
reload_nats.assert_called_once()
|
||||
|
||||
# test prune db with no tables
|
||||
data = {"action": "prune_db"}
|
||||
|
||||
@@ -105,7 +105,7 @@ def server_maintenance(request):
|
||||
from agents.models import Agent
|
||||
from autotasks.tasks import remove_orphaned_win_tasks
|
||||
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "last_seen", "overdue_time")
|
||||
online = [i for i in agents if i.status == "online"]
|
||||
for agent in online:
|
||||
remove_orphaned_win_tasks.delay(agent.pk)
|
||||
|
||||
@@ -140,7 +140,7 @@ def cancel_pending_action(request):
|
||||
def debug_log(request, mode, hostname, order):
|
||||
log_file = settings.LOG_CONFIG["handlers"][0]["sink"]
|
||||
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
|
||||
agent_hostnames = AgentHostnameSerializer(agents, many=True)
|
||||
|
||||
switch_mode = {
|
||||
|
||||
@@ -5,4 +5,6 @@ urlpatterns = [
|
||||
path("natsinfo/", views.nats_info),
|
||||
path("checkin/", views.NatsCheckIn.as_view()),
|
||||
path("syncmesh/", views.SyncMeshNodeID.as_view()),
|
||||
path("winupdates/", views.NatsWinUpdates.as_view()),
|
||||
path("choco/", views.NatsChoco.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import asyncio
|
||||
import time
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
@@ -12,6 +15,7 @@ from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from agents.models import Agent
|
||||
from winupdate.models import WinUpdate
|
||||
from software.models import InstalledSoftware
|
||||
from checks.utils import bytes2human
|
||||
from agents.serializers import WinAgentSerializer
|
||||
@@ -23,6 +27,8 @@ from agents.tasks import (
|
||||
|
||||
from tacticalrmm.utils import notify_error, filter_software, SoftwareList
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([])
|
||||
@@ -112,6 +118,16 @@ class NatsCheckIn(APIView):
|
||||
serializer.save()
|
||||
return Response("ok")
|
||||
|
||||
# called once during tacticalagent windows service startup
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if not agent.choco_installed:
|
||||
asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
|
||||
|
||||
time.sleep(0.5)
|
||||
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SyncMeshNodeID(APIView):
|
||||
authentication_classes = []
|
||||
@@ -124,3 +140,100 @@ class SyncMeshNodeID(APIView):
|
||||
agent.save(update_fields=["mesh_node_id"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class NatsChoco(APIView):
|
||||
authentication_classes = []
|
||||
permission_classes = []
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.choco_installed = request.data["installed"]
|
||||
agent.save(update_fields=["choco_installed"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class NatsWinUpdates(APIView):
|
||||
authentication_classes = []
|
||||
permission_classes = []
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy: str = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["needs_reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
u = agent.winupdates.filter(guid=request.data["guid"]).last()
|
||||
success: bool = request.data["success"]
|
||||
if success:
|
||||
u.result = "success"
|
||||
u.downloaded = True
|
||||
u.installed = True
|
||||
u.date_installed = djangotime.now()
|
||||
u.save(
|
||||
update_fields=[
|
||||
"result",
|
||||
"downloaded",
|
||||
"installed",
|
||||
"date_installed",
|
||||
]
|
||||
)
|
||||
else:
|
||||
u.result = "failed"
|
||||
u.save(update_fields=["result"])
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
updates = request.data["wua_updates"]
|
||||
for update in updates:
|
||||
if agent.winupdates.filter(guid=update["guid"]).exists():
|
||||
u = agent.winupdates.filter(guid=update["guid"]).last()
|
||||
u.downloaded = update["downloaded"]
|
||||
u.installed = update["installed"]
|
||||
u.save(update_fields=["downloaded", "installed"])
|
||||
else:
|
||||
try:
|
||||
kb = "KB" + update["kb_article_ids"][0]
|
||||
except:
|
||||
continue
|
||||
|
||||
WinUpdate(
|
||||
agent=agent,
|
||||
guid=update["guid"],
|
||||
kb=kb,
|
||||
title=update["title"],
|
||||
installed=update["installed"],
|
||||
downloaded=update["downloaded"],
|
||||
description=update["description"],
|
||||
severity=update["severity"],
|
||||
categories=update["categories"],
|
||||
category_ids=update["category_ids"],
|
||||
kb_article_ids=update["kb_article_ids"],
|
||||
more_info_urls=update["more_info_urls"],
|
||||
support_url=update["support_url"],
|
||||
revision_number=update["revision_number"],
|
||||
).save()
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
black
|
||||
Werkzeug
|
||||
django-extensions
|
||||
mkdocs
|
||||
mkdocs-material
|
||||
pymdown-extensions
|
||||
@@ -193,6 +193,6 @@
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "TRMM Defender Exclusions",
|
||||
"description": "Windows Defender Exclusions for Tactical RMM",
|
||||
"shell": "cmd"
|
||||
"shell": "powershell"
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
@@ -49,7 +49,6 @@ class Script(BaseAuditModel):
|
||||
|
||||
# load community uploaded scripts into the database
|
||||
# skip ones that already exist, only updating name / desc in case it changes
|
||||
# files will be copied by the update script or in docker to /srv/salt/scripts
|
||||
|
||||
# for install script
|
||||
if not settings.DOCKER_BUILD:
|
||||
@@ -73,6 +72,7 @@ class Script(BaseAuditModel):
|
||||
i.name = script["name"]
|
||||
i.description = script["description"]
|
||||
i.category = "Community"
|
||||
i.shell = script["shell"]
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
@@ -81,7 +81,13 @@ class Script(BaseAuditModel):
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||
|
||||
i.save(
|
||||
update_fields=["name", "description", "category", "code_base64"]
|
||||
update_fields=[
|
||||
"name",
|
||||
"description",
|
||||
"category",
|
||||
"code_base64",
|
||||
"shell",
|
||||
]
|
||||
)
|
||||
else:
|
||||
print(f"Adding new community script: {script['name']}")
|
||||
|
||||
@@ -6,60 +6,26 @@ from scripts.models import Script
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_command_task(agentpks, cmd, shell, timeout):
|
||||
def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
agents_salt = [agent for agent in agents if not agent.has_nats]
|
||||
minions = [agent.salt_id for agent in agents_salt]
|
||||
|
||||
if minions:
|
||||
Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="cmd.run_bg",
|
||||
kwargs={
|
||||
"cmd": cmd,
|
||||
"shell": shell,
|
||||
"timeout": timeout,
|
||||
},
|
||||
)
|
||||
|
||||
if agents_nats:
|
||||
nats_data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"command": cmd,
|
||||
"shell": shell,
|
||||
},
|
||||
}
|
||||
for agent in agents_nats:
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
nats_data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"command": cmd,
|
||||
"shell": shell,
|
||||
},
|
||||
}
|
||||
for agent in agents_nats:
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout):
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
agents_salt = [agent for agent in agents if not agent.has_nats]
|
||||
minions = [agent.salt_id for agent in agents_salt]
|
||||
|
||||
if minions:
|
||||
Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": timeout,
|
||||
"args": args,
|
||||
"bg": True if script.shell == "python" else False, # salt bg script bug
|
||||
},
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "runscript",
|
||||
"timeout": timeout,
|
||||
|
||||
@@ -1343,10 +1343,5 @@
|
||||
"name": "tacticalagent",
|
||||
"description": "Tactical RMM Monitoring Agent",
|
||||
"display_name": "Tactical RMM Agent"
|
||||
},
|
||||
{
|
||||
"name": "checkrunner",
|
||||
"description": "Tactical Agent Background Check Runner",
|
||||
"display_name": "Tactical Agent Check Runner"
|
||||
}
|
||||
]
|
||||
File diff suppressed because it is too large
@@ -13,5 +13,8 @@ class Command(BaseCommand):
|
||||
with open(os.path.join(settings.BASE_DIR, "software/chocos.json")) as f:
|
||||
chocos = json.load(f)
|
||||
|
||||
if ChocoSoftware.objects.exists():
|
||||
ChocoSoftware.objects.all().delete()
|
||||
|
||||
ChocoSoftware(chocos=chocos).save()
|
||||
self.stdout.write("Chocos saved to db")
|
||||
|
||||
@@ -7,30 +7,6 @@ class ChocoSoftware(models.Model):
|
||||
chocos = models.JSONField()
|
||||
added = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
@classmethod
|
||||
def sort_by_highest(cls):
|
||||
from .serializers import ChocoSoftwareSerializer
|
||||
|
||||
chocos = cls.objects.all()
|
||||
sizes = [
|
||||
{"size": len(ChocoSoftwareSerializer(i).data["chocos"]), "pk": i.pk}
|
||||
for i in chocos
|
||||
]
|
||||
biggest = max(range(len(sizes)), key=lambda index: sizes[index]["size"])
|
||||
return int(sizes[biggest]["pk"])
|
||||
|
||||
@classmethod
|
||||
def combine_all(cls):
|
||||
from .serializers import ChocoSoftwareSerializer
|
||||
|
||||
chocos = cls.objects.all()
|
||||
combined = []
|
||||
for i in chocos:
|
||||
combined.extend(ChocoSoftwareSerializer(i).data["chocos"])
|
||||
|
||||
# remove duplicates
|
||||
return [dict(t) for t in {tuple(d.items()) for d in combined}]
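combine_all, removed in this change, merged every stored chocos payload and then deduplicated the combined list of dicts with a set of item tuples. That one-liner is handy but only collapses entries whose values are hashable and whose keys appear in the same order; shown in isolation:

# The dedupe trick used by the removed combine_all(), on plain data.
combined = [
    {"name": "git", "version": "2.30.0"},
    {"name": "git", "version": "2.30.0"},   # exact duplicate, removed below
    {"name": "vlc", "version": "3.0.12"},
]
unique = [dict(t) for t in {tuple(d.items()) for d in combined}]
print(sorted(unique, key=lambda d: d["name"]))
# note: relies on hashable values and identical key order across the duplicates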
|
||||
|
||||
def __str__(self):
|
||||
from .serializers import ChocoSoftwareSerializer
|
||||
|
||||
|
||||
@@ -1,103 +1,24 @@
|
||||
import asyncio
|
||||
from time import sleep
|
||||
from loguru import logger
|
||||
from tacticalrmm.celery import app
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from agents.models import Agent
|
||||
from .models import ChocoSoftware, ChocoLog, InstalledSoftware
|
||||
from tacticalrmm.utils import filter_software
|
||||
from .models import ChocoLog
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task()
|
||||
def install_chocolatey(pk, wait=False):
|
||||
if wait:
|
||||
sleep(15)
|
||||
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=120, func="chocolatey.bootstrap", arg="force=True")
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
logger.error(f"failed to install choco on {agent.salt_id}")
|
||||
return
|
||||
|
||||
try:
|
||||
output = r.lower()
|
||||
except Exception as e:
|
||||
logger.error(f"failed to install choco on {agent.salt_id}: {e}")
|
||||
return
|
||||
|
||||
success = ["chocolatey", "is", "now", "ready"]
|
||||
|
||||
if all(x in output for x in success):
|
||||
agent.choco_installed = True
|
||||
agent.save(update_fields=["choco_installed"])
|
||||
logger.info(f"Installed chocolatey on {agent.salt_id}")
|
||||
return "ok"
|
||||
else:
|
||||
logger.error(f"failed to install choco on {agent.salt_id}")
|
||||
return
|
||||
|
||||
|
||||
@app.task
|
||||
def update_chocos():
|
||||
# delete choco software older than 10 days
|
||||
try:
|
||||
first = ChocoSoftware.objects.first().pk
|
||||
q = ChocoSoftware.objects.exclude(pk=first).filter(
|
||||
added__lte=djangotime.now() - djangotime.timedelta(days=10)
|
||||
)
|
||||
q.delete()
|
||||
except:
|
||||
pass
|
||||
|
||||
agents = Agent.objects.only("pk")
|
||||
online = [x for x in agents if x.status == "online" and x.choco_installed]
|
||||
|
||||
while 1:
|
||||
for agent in online:
|
||||
|
||||
r = agent.salt_api_cmd(timeout=10, func="test.ping")
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
continue
|
||||
|
||||
if isinstance(r, bool) and r:
|
||||
ret = agent.salt_api_cmd(timeout=200, func="chocolatey.list")
|
||||
if ret == "timeout" or ret == "error":
|
||||
continue
|
||||
|
||||
try:
|
||||
chocos = [{"name": k, "version": v[0]} for k, v in ret.items()]
|
||||
except AttributeError:
|
||||
continue
|
||||
else:
|
||||
# sometimes the chocolatey api is down or buggy and doesn't return the full list of software
|
||||
if len(chocos) < 4000:
|
||||
continue
|
||||
else:
|
||||
logger.info(f"Chocos were updated using {agent.salt_id}")
|
||||
ChocoSoftware(chocos=chocos).save()
|
||||
break
|
||||
|
||||
break
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def install_program(pk, name, version):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=900,
|
||||
func="chocolatey.install",
|
||||
arg=[name, f"version={version}"],
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
nats_data = {
|
||||
"func": "installwithchoco",
|
||||
"choco_prog_name": name,
|
||||
"choco_prog_ver": version,
|
||||
}
|
||||
r: str = asyncio.run(agent.nats_cmd(nats_data, timeout=915))
|
||||
if r == "timeout":
|
||||
logger.error(f"Failed to install {name} {version} on {agent.salt_id}: timeout")
|
||||
return
|
||||
|
||||
|
||||
@@ -2,8 +2,7 @@ from tacticalrmm.test import TacticalTestCase
|
||||
from .serializers import InstalledSoftwareSerializer
|
||||
from model_bakery import baker
|
||||
from unittest.mock import patch
|
||||
from .models import InstalledSoftware, ChocoLog
|
||||
from agents.models import Agent
|
||||
from .models import ChocoLog
|
||||
|
||||
|
||||
class TestSoftwareViews(TacticalTestCase):
|
||||
@@ -64,83 +63,20 @@ class TestSoftwareViews(TacticalTestCase):
|
||||
|
||||
|
||||
class TestSoftwareTasks(TacticalTestCase):
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_install_chocolatey(self, salt_api_cmd):
|
||||
from .tasks import install_chocolatey
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = install_chocolatey(agent.pk)
|
||||
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=120, func="chocolatey.bootstrap", arg="force=True"
|
||||
)
|
||||
self.assertFalse(ret)
|
||||
|
||||
# test successful
|
||||
salt_api_cmd.return_value = "chocolatey is now ready"
|
||||
ret = install_chocolatey(agent.pk)
|
||||
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=120, func="chocolatey.bootstrap", arg="force=True"
|
||||
)
|
||||
self.assertTrue(ret)
|
||||
self.assertTrue(Agent.objects.get(pk=agent.pk).choco_installed)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_update_chocos(self, salt_api_cmd):
|
||||
from .tasks import update_chocos
|
||||
|
||||
# initialize data
|
||||
online_agent = baker.make_recipe("agents.online_agent", choco_installed=True)
|
||||
baker.make("software.ChocoSoftware", chocos={})
|
||||
|
||||
# return data
|
||||
chocolately_list = {
|
||||
"git": "2.3.4",
|
||||
"docker": "1.0.2",
|
||||
}
|
||||
|
||||
# test failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = update_chocos()
|
||||
|
||||
salt_api_cmd.assert_called_with(timeout=10, func="test.ping")
|
||||
self.assertTrue(ret)
|
||||
self.assertEquals(salt_api_cmd.call_count, 1)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# test successful attempt
|
||||
salt_api_cmd.side_effect = [True, chocolately_list]
|
||||
ret = update_chocos()
|
||||
self.assertTrue(ret)
|
||||
salt_api_cmd.assert_any_call(timeout=10, func="test.ping")
|
||||
salt_api_cmd.assert_any_call(timeout=200, func="chocolatey.list")
|
||||
self.assertEquals(salt_api_cmd.call_count, 2)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_install_program(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_install_program(self, nats_cmd):
|
||||
from .tasks import install_program
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
|
||||
# failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = install_program(agent.pk, "git", "2.3.4")
|
||||
self.assertFalse(ret)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=900, func="chocolatey.install", arg=["git", "version=2.3.4"]
|
||||
)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# successful attempt
|
||||
salt_api_cmd.return_value = "install of git was successful"
|
||||
ret = install_program(agent.pk, "git", "2.3.4")
|
||||
self.assertTrue(ret)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=900, func="chocolatey.install", arg=["git", "version=2.3.4"]
|
||||
nats_cmd.return_value = "install of git was successful"
|
||||
_ = install_program(agent.pk, "git", "2.3.4")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "installwithchoco",
|
||||
"choco_prog_name": "git",
|
||||
"choco_prog_ver": "2.3.4",
|
||||
},
|
||||
timeout=915,
|
||||
)
|
||||
|
||||
self.assertTrue(ChocoLog.objects.filter(agent=agent, name="git").exists())
|
||||
|
||||
@@ -8,14 +8,15 @@ from rest_framework.response import Response
|
||||
|
||||
from agents.models import Agent
|
||||
from .models import ChocoSoftware, InstalledSoftware
|
||||
from .serializers import InstalledSoftwareSerializer
|
||||
from .serializers import InstalledSoftwareSerializer, ChocoSoftwareSerializer
|
||||
from .tasks import install_program
|
||||
from tacticalrmm.utils import notify_error, filter_software
|
||||
|
||||
|
||||
@api_view()
|
||||
def chocos(request):
|
||||
return Response(ChocoSoftware.combine_all())
|
||||
chocos = ChocoSoftware.objects.last()
|
||||
return Response(ChocoSoftwareSerializer(chocos).data["chocos"])
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
|
||||
@@ -21,10 +21,6 @@ app.conf.task_track_started = True
|
||||
app.autodiscover_tasks()
|
||||
|
||||
app.conf.beat_schedule = {
|
||||
"update-chocos": {
|
||||
"task": "software.tasks.update_chocos",
|
||||
"schedule": crontab(minute=0, hour=4),
|
||||
},
|
||||
"auto-approve-win-updates": {
|
||||
"task": "winupdate.tasks.auto_approve_updates_task",
|
||||
"schedule": crontab(minute=2, hour="*/8"),
|
||||
@@ -53,6 +49,10 @@ app.conf.beat_schedule = {
|
||||
"task": "agents.tasks.monitor_agents_task",
|
||||
"schedule": crontab(minute="*/15"),
|
||||
},
|
||||
"remove-salt": {
|
||||
"task": "agents.tasks.remove_salt_task",
|
||||
"schedule": crontab(minute=14, hour="*/2"),
|
||||
},
|
||||
}
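The beat schedule loses the nightly update-chocos job and gains a remove-salt cleanup running at minute 14 of every second hour, consistent with the salt pieces being wound down elsewhere in this change. A minimal sketch of registering such an entry; the task path is the one shown in the diff, the surrounding Celery app setup is assumed:

# Sketch of a celery beat entry like the one added above; `app` here is a stand-in Celery instance.
from celery import Celery
from celery.schedules import crontab

app = Celery("tacticalrmm")
app.conf.beat_schedule = {
    "remove-salt": {
        "task": "agents.tasks.remove_salt_task",
        "schedule": crontab(minute=14, hour="*/2"),  # 00:14, 02:14, 04:14, ...
    },
}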
|
||||
|
||||
|
||||
|
||||
@@ -37,10 +37,7 @@ if not DEBUG:
|
||||
)
|
||||
})
|
||||
|
||||
SALT_USERNAME = "changeme"
|
||||
SALT_PASSWORD = "changeme"
|
||||
MESH_USERNAME = "changeme"
|
||||
MESH_SITE = "https://mesh.example.com"
|
||||
MESH_TOKEN_KEY = "changeme"
|
||||
REDIS_HOST = "localhost"
|
||||
SALT_HOST = "127.0.0.1"
|
||||
REDIS_HOST = "localhost"
|
||||
@@ -16,7 +16,6 @@ def get_debug_info():
|
||||
EXCLUDE_PATHS = (
|
||||
"/natsapi",
|
||||
"/api/v3",
|
||||
"/api/v2",
|
||||
"/logs/auditlogs",
|
||||
f"/{settings.ADMIN_URL}",
|
||||
"/logout",
|
||||
|
||||
@@ -15,32 +15,24 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# latest release
|
||||
TRMM_VERSION = "0.3.2"
|
||||
TRMM_VERSION = "0.4.2"
|
||||
|
||||
# bump this version every time vue code is changed
|
||||
# to alert user they need to manually refresh their browser
|
||||
APP_VER = "0.0.105"
|
||||
|
||||
# https://github.com/wh1te909/salt
|
||||
LATEST_SALT_VER = "1.1.0"
|
||||
APP_VER = "0.0.109"
|
||||
|
||||
# https://github.com/wh1te909/rmmagent
|
||||
LATEST_AGENT_VER = "1.2.1"
|
||||
LATEST_AGENT_VER = "1.4.1"
|
||||
|
||||
MESH_VER = "0.7.45"
|
||||
|
||||
SALT_MASTER_VER = "3002.2"
|
||||
MESH_VER = "0.7.54"
|
||||
|
||||
# for the update script, bump when we need to recreate the venv or rerun npm install
|
||||
PIP_VER = "7"
|
||||
NPM_VER = "6"
|
||||
NPM_VER = "7"
|
||||
|
||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||
DL_32 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}-x86.exe"
|
||||
|
||||
SALT_64 = f"https://github.com/wh1te909/salt/releases/download/{LATEST_SALT_VER}/salt-minion-setup.exe"
|
||||
SALT_32 = f"https://github.com/wh1te909/salt/releases/download/{LATEST_SALT_VER}/salt-minion-setup-x86.exe"
|
||||
|
||||
try:
|
||||
from .local_settings import *
|
||||
except ImportError:
|
||||
@@ -58,7 +50,6 @@ INSTALLED_APPS = [
|
||||
"knox",
|
||||
"corsheaders",
|
||||
"accounts",
|
||||
"apiv2",
|
||||
"apiv3",
|
||||
"clients",
|
||||
"agents",
|
||||
@@ -176,17 +167,14 @@ if "AZPIPELINE" in os.environ:
|
||||
}
|
||||
|
||||
ALLOWED_HOSTS = ["api.example.com"]
|
||||
DOCKER_BUILD = True
|
||||
DEBUG = True
|
||||
SECRET_KEY = "abcdefghijklmnoptravis123456789"
|
||||
|
||||
ADMIN_URL = "abc123456/"
|
||||
|
||||
SCRIPTS_DIR = os.path.join(Path(BASE_DIR).parents[1], "scripts")
|
||||
SALT_USERNAME = "pipeline"
|
||||
SALT_PASSWORD = "pipeline"
|
||||
MESH_USERNAME = "pipeline"
|
||||
MESH_SITE = "https://example.com"
|
||||
MESH_TOKEN_KEY = "bd65e957a1e70c622d32523f61508400d6cd0937001a7ac12042227eba0b9ed625233851a316d4f489f02994145f74537a331415d00047dbbf13d940f556806dffe7a8ce1de216dc49edbad0c1a7399c"
|
||||
REDIS_HOST = "localhost"
|
||||
SALT_HOST = "127.0.0.1"
|
||||
KEEP_SALT = False
|
||||
|
||||
@@ -10,7 +10,6 @@ urlpatterns = [
|
||||
path("login/", LoginView.as_view()),
|
||||
path("logout/", knox_views.LogoutView.as_view()),
|
||||
path("logoutall/", knox_views.LogoutAllView.as_view()),
|
||||
path("api/v2/", include("apiv2.urls")),
|
||||
path("api/v3/", include("apiv3.urls")),
|
||||
path("clients/", include("clients.urls")),
|
||||
path("agents/", include("agents.urls")),
|
||||
|
||||
@@ -0,0 +1,93 @@
|
||||
# Generated by Django 3.1.5 on 2021-01-19 00:52
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("winupdate", "0009_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="winupdate",
|
||||
name="categories",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(blank=True, max_length=255, null=True),
|
||||
blank=True,
|
||||
default=list,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="winupdate",
|
||||
name="category_ids",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(blank=True, max_length=255, null=True),
|
||||
blank=True,
|
||||
default=list,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="winupdate",
|
||||
name="kb_article_ids",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(blank=True, max_length=255, null=True),
|
||||
blank=True,
|
||||
default=list,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="winupdate",
|
||||
name="more_info_urls",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.TextField(blank=True, null=True),
|
||||
blank=True,
|
||||
default=list,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="winupdate",
|
||||
name="revision_number",
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="winupdate",
|
||||
name="support_url",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="winupdate",
|
||||
name="date_installed",
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="winupdate",
|
||||
name="description",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="winupdate",
|
||||
name="guid",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="winupdate",
|
||||
name="kb",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="winupdate",
|
||||
name="title",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -42,20 +42,46 @@ class WinUpdate(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
Agent, related_name="winupdates", on_delete=models.CASCADE
|
||||
)
|
||||
guid = models.CharField(max_length=255, null=True)
|
||||
kb = models.CharField(max_length=100, null=True)
|
||||
mandatory = models.BooleanField(default=False)
|
||||
title = models.TextField(null=True)
|
||||
needs_reboot = models.BooleanField(default=False)
|
||||
guid = models.CharField(max_length=255, null=True, blank=True)
|
||||
kb = models.CharField(max_length=100, null=True, blank=True)
|
||||
mandatory = models.BooleanField(default=False) # deprecated
|
||||
title = models.TextField(null=True, blank=True)
|
||||
needs_reboot = models.BooleanField(default=False) # deprecated
|
||||
installed = models.BooleanField(default=False)
|
||||
downloaded = models.BooleanField(default=False)
|
||||
description = models.TextField(null=True)
|
||||
description = models.TextField(null=True, blank=True)
|
||||
severity = models.CharField(max_length=255, null=True, blank=True)
|
||||
categories = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
category_ids = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
kb_article_ids = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
more_info_urls = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
support_url = models.TextField(null=True, blank=True)
|
||||
revision_number = models.IntegerField(null=True, blank=True)
|
||||
action = models.CharField(
|
||||
max_length=100, choices=PATCH_ACTION_CHOICES, default="nothing"
|
||||
)
|
||||
result = models.CharField(max_length=255, default="n/a")
|
||||
date_installed = models.DateTimeField(null=True)
|
||||
date_installed = models.DateTimeField(null=True, blank=True)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.agent.hostname} {self.kb}"
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
from time import sleep
|
||||
import asyncio
|
||||
import time
|
||||
from django.utils import timezone as djangotime
|
||||
from django.conf import settings
|
||||
import datetime as dt
|
||||
import pytz
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from typing import List
|
||||
|
||||
from agents.models import Agent
|
||||
from .models import WinUpdate
|
||||
@@ -16,31 +19,42 @@ logger.configure(**settings.LOG_CONFIG)
|
||||
def auto_approve_updates_task():
|
||||
# scheduled task that checks and approves updates daily
|
||||
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
|
||||
for agent in agents:
|
||||
agent.delete_superseded_updates()
|
||||
try:
|
||||
agent.approve_updates()
|
||||
except:
|
||||
continue
|
||||
|
||||
online = [i for i in agents if i.status == "online"]
|
||||
online = [
|
||||
i
|
||||
for i in agents
|
||||
if i.status == "online" and pyver.parse(i.version) >= pyver.parse("1.3.0")
|
||||
]
|
||||
|
||||
for agent in online:
|
||||
|
||||
# check for updates on agent
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate",
|
||||
kwargs={"pk": agent.pk, "wait": False, "auto_approve": True},
|
||||
)
|
||||
chunks = (online[i : i + 40] for i in range(0, len(online), 40))
|
||||
for chunk in chunks:
|
||||
for agent in chunk:
|
||||
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
|
||||
time.sleep(0.05)
|
||||
time.sleep(15)
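auto_approve_updates_task (and the bulk tasks further down) now push the getwinupdates command straight over NATS and pace the fan-out: agents are taken in slices of 40, with a 50 ms gap between publishes and a 15 s pause between slices, so a large fleet isn't hit all at once. The pacing pattern on its own, with a stub publish:

# Pacing sketch: `publish` is a stub standing in for the per-agent NATS command.
import time

def publish(agent_id: int) -> None:
    pass  # fire-and-forget command to one agent would go here

def paced_fanout(agent_ids: list, batch: int = 40) -> None:
    chunks = (agent_ids[i : i + batch] for i in range(0, len(agent_ids), batch))
    for chunk in chunks:
        for agent_id in chunk:
            publish(agent_id)
            time.sleep(0.05)   # small gap between publishes
        time.sleep(15)         # let one batch land before starting the next

# usage: paced_fanout(list_of_agent_pks)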
|
||||
|
||||
|
||||
@app.task
|
||||
def check_agent_update_schedule_task():
|
||||
# scheduled task that installs updates on agents if enabled
|
||||
agents = Agent.objects.all()
|
||||
online = [i for i in agents if i.has_patches_pending and i.status == "online"]
|
||||
agents = Agent.objects.only("pk", "version", "last_seen", "overdue_time")
|
||||
online = [
|
||||
i
|
||||
for i in agents
|
||||
if pyver.parse(i.version) >= pyver.parse("1.3.0")
|
||||
and i.has_patches_pending
|
||||
and i.status == "online"
|
||||
]
|
||||
|
||||
for agent in online:
|
||||
agent.delete_superseded_updates()
|
||||
install = False
|
||||
patch_policy = agent.get_patch_policy()
|
||||
|
||||
@@ -98,117 +112,40 @@ def check_agent_update_schedule_task():
|
||||
if install:
|
||||
# initiate update on agent asynchronously and don't worry about ret code
|
||||
logger.info(f"Installing windows updates on {agent.salt_id}")
|
||||
agent.salt_api_async(func="win_agent.install_updates")
|
||||
nats_data = {
|
||||
"func": "installwinupdates",
|
||||
"guids": agent.get_approved_update_guids(),
|
||||
}
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
agent.patches_last_installed = djangotime.now()
|
||||
agent.save(update_fields=["patches_last_installed"])
|
||||
|
||||
|
||||
@app.task
|
||||
def check_for_updates_task(pk, wait=False, auto_approve=False):
|
||||
|
||||
if wait:
|
||||
sleep(120)
|
||||
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
ret = agent.salt_api_cmd(
|
||||
timeout=310,
|
||||
func="win_wua.list",
|
||||
arg="skip_installed=False",
|
||||
)
|
||||
|
||||
if ret == "timeout" or ret == "error":
|
||||
return
|
||||
|
||||
if isinstance(ret, str):
|
||||
err = ["unknown failure", "2147352567", "2145107934"]
|
||||
if any(x in ret.lower() for x in err):
|
||||
logger.warning(f"{agent.salt_id}: {ret}")
|
||||
return "failed"
|
||||
|
||||
guids = []
|
||||
try:
|
||||
for k in ret.keys():
|
||||
guids.append(k)
|
||||
except Exception as e:
|
||||
logger.error(f"{agent.salt_id}: {str(e)}")
|
||||
return
|
||||
|
||||
for i in guids:
|
||||
# check if existing update install / download status has changed
|
||||
if WinUpdate.objects.filter(agent=agent).filter(guid=i).exists():
|
||||
|
||||
update = WinUpdate.objects.filter(agent=agent).get(guid=i)
|
||||
|
||||
# salt will report an update as not installed even if it has been installed if a reboot is pending
|
||||
# ignore salt's return if the result field is 'success' as that means the agent has successfully installed the update
|
||||
if update.result != "success":
|
||||
if ret[i]["Installed"] != update.installed:
|
||||
update.installed = not update.installed
|
||||
update.save(update_fields=["installed"])
|
||||
|
||||
if ret[i]["Downloaded"] != update.downloaded:
|
||||
update.downloaded = not update.downloaded
|
||||
update.save(update_fields=["downloaded"])
|
||||
|
||||
# otherwise it's a new update
|
||||
else:
|
||||
WinUpdate(
|
||||
agent=agent,
|
||||
guid=i,
|
||||
kb=ret[i]["KBs"][0],
|
||||
mandatory=ret[i]["Mandatory"],
|
||||
title=ret[i]["Title"],
|
||||
needs_reboot=ret[i]["NeedsReboot"],
|
||||
installed=ret[i]["Installed"],
|
||||
downloaded=ret[i]["Downloaded"],
|
||||
description=ret[i]["Description"],
|
||||
severity=ret[i]["Severity"],
|
||||
).save()
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
|
||||
# win_wua.list doesn't always return everything
|
||||
# use win_wua.installed to check for any updates that it missed
|
||||
# and then change update status to match
|
||||
installed = agent.salt_api_cmd(
|
||||
timeout=60, func="win_wua.installed", arg="kbs_only=True"
|
||||
)
|
||||
|
||||
if installed == "timeout" or installed == "error":
|
||||
pass
|
||||
elif isinstance(installed, list):
|
||||
agent.winupdates.filter(kb__in=installed).filter(installed=False).update(
|
||||
installed=True, downloaded=True
|
||||
)
|
||||
|
||||
# check if reboot needed. returns bool
|
||||
needs_reboot = agent.salt_api_cmd(timeout=30, func="win_wua.get_needs_reboot")
|
||||
|
||||
if needs_reboot == "timeout" or needs_reboot == "error":
|
||||
pass
|
||||
elif isinstance(needs_reboot, bool) and needs_reboot:
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
else:
|
||||
agent.needs_reboot = False
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
# approve updates if specified
|
||||
if auto_approve:
|
||||
agent.approve_updates()
|
||||
|
||||
return "ok"
|
||||
def bulk_install_updates_task(pks: List[int]) -> None:
|
||||
q = Agent.objects.filter(pk__in=pks)
|
||||
agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
|
||||
chunks = (agents[i : i + 40] for i in range(0, len(agents), 40))
|
||||
for chunk in chunks:
|
||||
for agent in chunk:
|
||||
agent.delete_superseded_updates()
|
||||
nats_data = {
|
||||
"func": "installwinupdates",
|
||||
"guids": agent.get_approved_update_guids(),
|
||||
}
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
time.sleep(0.05)
|
||||
time.sleep(15)
|
||||
|
||||
|
||||
@app.task
|
||||
def bulk_check_for_updates_task(minions):
|
||||
# don't flood the celery queue
|
||||
chunks = (minions[i : i + 30] for i in range(0, len(minions), 30))
|
||||
def bulk_check_for_updates_task(pks: List[int]) -> None:
|
||||
q = Agent.objects.filter(pk__in=pks)
|
||||
agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
|
||||
chunks = (agents[i : i + 40] for i in range(0, len(agents), 40))
|
||||
for chunk in chunks:
|
||||
for i in chunk:
|
||||
agent = Agent.objects.get(salt_id=i)
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate",
|
||||
kwargs={"pk": agent.pk, "wait": False, "auto_approve": True},
|
||||
)
|
||||
sleep(30)
|
||||
for agent in chunk:
|
||||
agent.delete_superseded_updates()
|
||||
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
|
||||
time.sleep(0.05)
|
||||
time.sleep(15)
|
||||
|
||||
@@ -29,7 +29,7 @@ class TestWinUpdateViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("winupdate.tasks.check_for_updates_task.apply_async")
|
||||
""" @patch("winupdate.tasks.check_for_updates_task.apply_async")
|
||||
def test_run_update_scan(self, mock_task):
|
||||
|
||||
# test a call where agent doesn't exist
|
||||
@@ -46,9 +46,9 @@ class TestWinUpdateViews(TacticalTestCase):
|
||||
kwargs={"pk": agent.pk, "wait": False, "auto_approve": True},
|
||||
)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url) """
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
""" @patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_install_updates(self, mock_cmd):
|
||||
|
||||
# test a call where agent doesn't exist
|
||||
@@ -84,7 +84,7 @@ class TestWinUpdateViews(TacticalTestCase):
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url) """
|
||||
|
||||
def test_edit_policy(self):
|
||||
url = "/winupdate/editpolicy/"
|
||||
@@ -113,8 +113,9 @@ class WinupdateTasks(TacticalTestCase):
|
||||
)
|
||||
self.offline_agent = baker.make_recipe("agents.agent", site=site)
|
||||
|
||||
@patch("winupdate.tasks.check_for_updates_task.apply_async")
|
||||
def test_auto_approve_task(self, check_updates_task):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("time.sleep")
|
||||
def test_auto_approve_task(self, mock_sleep, nats_cmd):
|
||||
from .tasks import auto_approve_updates_task
|
||||
|
||||
# Setup data
|
||||
@@ -137,14 +138,14 @@ class WinupdateTasks(TacticalTestCase):
|
||||
auto_approve_updates_task()
|
||||
|
||||
# make sure the check_for_updates_task was run once for each online agent
|
||||
self.assertEqual(check_updates_task.call_count, 2)
|
||||
self.assertEqual(nats_cmd.call_count, 2)
|
||||
|
||||
# check if all of the created updates were approved
|
||||
winupdates = WinUpdate.objects.all()
|
||||
for update in winupdates:
|
||||
self.assertEqual(update.action, "approve")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async")
|
||||
""" @patch("agents.models.Agent.salt_api_async")
|
||||
def test_check_agent_update_daily_schedule(self, agent_salt_cmd):
|
||||
from .tasks import check_agent_update_schedule_task
|
||||
|
||||
@@ -173,7 +174,7 @@ class WinupdateTasks(TacticalTestCase):
|
||||
|
||||
check_agent_update_schedule_task()
|
||||
agent_salt_cmd.assert_called_with(func="win_agent.install_updates")
|
||||
self.assertEquals(agent_salt_cmd.call_count, 2)
|
||||
self.assertEquals(agent_salt_cmd.call_count, 2) """
|
||||
|
||||
""" @patch("agents.models.Agent.salt_api_async")
|
||||
def test_check_agent_update_monthly_schedule(self, agent_salt_cmd):
|
||||
@@ -205,109 +206,3 @@ class WinupdateTasks(TacticalTestCase):
|
||||
check_agent_update_schedule_task()
|
||||
agent_salt_cmd.assert_called_with(func="win_agent.install_updates")
|
||||
self.assertEquals(agent_salt_cmd.call_count, 2) """
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_check_for_updates(self, salt_api_cmd):
|
||||
from .tasks import check_for_updates_task
|
||||
|
||||
# create a matching update returned from salt
|
||||
baker.make_recipe(
|
||||
"winupdate.approved_winupdate",
|
||||
agent=self.online_agents[0],
|
||||
kb="KB12341234",
|
||||
guid="GUID1",
|
||||
downloaded=True,
|
||||
severity="",
|
||||
installed=True,
|
||||
)
|
||||
|
||||
salt_success_return = {
|
||||
"GUID1": {
|
||||
"Title": "Update Title",
|
||||
"KBs": ["KB12341234"],
|
||||
"GUID": "GUID1",
|
||||
"Description": "Description",
|
||||
"Downloaded": False,
|
||||
"Installed": False,
|
||||
"Mandatory": False,
|
||||
"Severity": "",
|
||||
"NeedsReboot": True,
|
||||
},
|
||||
"GUID2": {
|
||||
"Title": "Update Title 2",
|
||||
"KBs": ["KB12341235"],
|
||||
"GUID": "GUID2",
|
||||
"Description": "Description",
|
||||
"Downloaded": False,
|
||||
"Installed": True,
|
||||
"Mandatory": False,
|
||||
"Severity": "",
|
||||
"NeedsReboot": True,
|
||||
},
|
||||
}
|
||||
|
||||
salt_kb_list = ["KB12341235"]
|
||||
|
||||
# mock failed attempt
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = check_for_updates_task(self.online_agents[0].pk)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=310,
|
||||
func="win_wua.list",
|
||||
arg="skip_installed=False",
|
||||
)
|
||||
self.assertFalse(ret)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# mock failed attempt
|
||||
salt_api_cmd.return_value = "error"
|
||||
ret = check_for_updates_task(self.online_agents[0].pk)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=310,
|
||||
func="win_wua.list",
|
||||
arg="skip_installed=False",
|
||||
)
|
||||
self.assertFalse(ret)
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# mock failed attempt
|
||||
salt_api_cmd.return_value = "unknown failure"
|
||||
ret = check_for_updates_task(self.online_agents[0].pk)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=310,
|
||||
func="win_wua.list",
|
||||
arg="skip_installed=False",
|
||||
)
|
||||
self.assertEquals(ret, "failed")
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# mock failed attempt at salt list updates with reboot
|
||||
salt_api_cmd.side_effect = [salt_success_return, "timeout", True]
|
||||
ret = check_for_updates_task(self.online_agents[0].pk)
|
||||
salt_api_cmd.assert_any_call(
|
||||
timeout=310,
|
||||
func="win_wua.list",
|
||||
arg="skip_installed=False",
|
||||
)
|
||||
salt_api_cmd.assert_any_call(
|
||||
timeout=60, func="win_wua.installed", arg="kbs_only=True"
|
||||
)
|
||||
|
||||
salt_api_cmd.assert_any_call(timeout=30, func="win_wua.get_needs_reboot")
|
||||
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# mock successful attempt without reboot
|
||||
salt_api_cmd.side_effect = [salt_success_return, salt_kb_list, False]
|
||||
ret = check_for_updates_task(self.online_agents[0].pk)
|
||||
salt_api_cmd.assert_any_call(
|
||||
timeout=310,
|
||||
func="win_wua.list",
|
||||
arg="skip_installed=False",
|
||||
)
|
||||
|
||||
salt_api_cmd.assert_any_call(
|
||||
timeout=60, func="win_wua.installed", arg="kbs_only=True"
|
||||
)
|
||||
|
||||
salt_api_cmd.assert_any_call(timeout=30, func="win_wua.get_needs_reboot")
|
||||
|
||||
@@ -1,10 +1,8 @@
import asyncio
from packaging import version as pyver
from django.shortcuts import get_object_or_404

from rest_framework.decorators import (
    api_view,
    authentication_classes,
    permission_classes,
)
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
@@ -12,7 +10,6 @@ from rest_framework.permissions import IsAuthenticated
from agents.models import Agent
from .models import WinUpdate
from .serializers import UpdateSerializer, ApprovedUpdateSerializer
from .tasks import check_for_updates_task
from tacticalrmm.utils import notify_error

@@ -25,30 +22,26 @@ def get_win_updates(request, pk):
@api_view()
def run_update_scan(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    check_for_updates_task.apply_async(
        queue="wupdate", kwargs={"pk": agent.pk, "wait": False, "auto_approve": True}
    )
    agent.delete_superseded_updates()
    if pyver.parse(agent.version) < pyver.parse("1.3.0"):
        return notify_error("Requires agent version 1.3.0 or greater")

    asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
    return Response("ok")


@api_view()
def install_updates(request, pk):
    agent = get_object_or_404(Agent, pk=pk)
    r = agent.salt_api_cmd(timeout=15, func="win_agent.install_updates")

    if r == "timeout":
        return notify_error("Unable to contact the agent")
    elif r == "error":
        return notify_error("Something went wrong")
    elif r == "running":
        return notify_error(f"Updates are already being installed on {agent.hostname}")

    # successful response: {'return': [{'SALT-ID': {'pid': 3316}}]}
    try:
        r["pid"]
    except (KeyError):
        return notify_error(str(r))
    agent.delete_superseded_updates()
    if pyver.parse(agent.version) < pyver.parse("1.3.0"):
        return notify_error("Requires agent version 1.3.0 or greater")

    nats_data = {
        "func": "installwinupdates",
        "guids": agent.get_approved_update_guids(),
    }
    asyncio.run(agent.nats_cmd(nats_data, wait=False))
    return Response(f"Patches will now be installed on {agent.hostname}")
@@ -1,6 +1,6 @@
#!/bin/bash

SCRIPT_VERSION="6"
SCRIPT_VERSION="7"
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'

GREEN='\033[0;32m'
@@ -61,7 +61,6 @@ sysd="/etc/systemd/system"

mkdir -p ${tmp_dir}/meshcentral/mongo
mkdir ${tmp_dir}/postgres
mkdir ${tmp_dir}/salt
mkdir ${tmp_dir}/certs
mkdir ${tmp_dir}/nginx
mkdir ${tmp_dir}/systemd
@@ -74,16 +73,13 @@ pg_dump --dbname=postgresql://"${POSTGRES_USER}":"${POSTGRES_PW}"@127.0.0.1:5432
tar -czvf ${tmp_dir}/meshcentral/mesh.tar.gz --exclude=/meshcentral/node_modules /meshcentral
mongodump --gzip --out=${tmp_dir}/meshcentral/mongo

sudo tar -czvf ${tmp_dir}/salt/etc-salt.tar.gz -C /etc/salt .
tar -czvf ${tmp_dir}/salt/srv-salt.tar.gz -C /srv/salt .

sudo tar -czvf ${tmp_dir}/certs/etc-letsencrypt.tar.gz -C /etc/letsencrypt .

sudo tar -czvf ${tmp_dir}/nginx/etc-nginx.tar.gz -C /etc/nginx .

sudo tar -czvf ${tmp_dir}/confd/etc-confd.tar.gz -C /etc/conf.d .

sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/celery-winupdate.service ${sysd}/meshcentral.service ${sysd}/nats.service ${sysd}/natsapi.service ${tmp_dir}/systemd/
sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/meshcentral.service ${sysd}/nats.service ${sysd}/natsapi.service ${tmp_dir}/systemd/

cat /rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
cp /rmm/api/tacticalrmm/tacticalrmm/local_settings.py /rmm/api/tacticalrmm/app.ini ${tmp_dir}/rmm/
@@ -63,18 +63,6 @@ server {
        alias ${TACTICAL_DIR}/api/tacticalrmm/private/;
    }

    location /saltscripts/ {
        internal;
        add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
        alias ${TACTICAL_DIR}/scripts/userdefined/;
    }

    location /builtin/ {
        internal;
        add_header "Access-Control-Allow-Origin" "https://${APP_HOST}";
        alias ${TACTICAL_DIR}/scripts/;
    }

    location ~ ^/(natsapi) {
        deny all;
    }
@@ -1,24 +0,0 @@
FROM ubuntu:20.04

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
ENV SALT_USER saltapi

SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]

RUN apt-get update && \
    apt-get install -y ca-certificates wget gnupg2 tzdata supervisor && \
    wget -O - https://repo.saltstack.com/py3/ubuntu/20.04/amd64/latest/SALTSTACK-GPG-KEY.pub | apt-key add - && \
    echo 'deb http://repo.saltstack.com/py3/ubuntu/20.04/amd64/latest focal main' | tee /etc/apt/sources.list.d/saltstack.list && \
    apt-get update && \
    apt-get install -y salt-master salt-api && \
    mkdir -p /var/log/supervisor && \
    sed -i 's/msgpack_kwargs = {"raw": six.PY2}/msgpack_kwargs = {"raw": six.PY2, "max_buffer_size": 2147483647}/g' /usr/lib/python3/dist-packages/salt/transport/ipc.py && \
    adduser --no-create-home --disabled-password --gecos "" ${SALT_USER}

EXPOSE 8123 4505 4506

COPY docker/containers/tactical-salt/entrypoint.sh /
RUN chmod +x /entrypoint.sh

ENTRYPOINT [ "/entrypoint.sh" ]
@@ -1,64 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
: "${SALT_USER:='saltapi'}"
|
||||
|
||||
sleep 15
|
||||
until [ -f "${TACTICAL_READY_FILE}" ]; do
|
||||
echo "waiting for init container to finish install or update..."
|
||||
sleep 10
|
||||
done
|
||||
|
||||
SALT_PASS=$(cat ${TACTICAL_DIR}/tmp/salt_pass)
|
||||
|
||||
echo "${SALT_USER}:${SALT_PASS}" | chpasswd
|
||||
|
||||
cherrypy_config="$(cat << EOF
|
||||
file_roots:
|
||||
base:
|
||||
- /srv/salt
|
||||
- ${TACTICAL_DIR}
|
||||
timeout: 20
|
||||
gather_job_timeout: 25
|
||||
max_event_size: 30485760
|
||||
external_auth:
|
||||
pam:
|
||||
${SALT_USER}:
|
||||
- .*
|
||||
- '@runner'
|
||||
- '@wheel'
|
||||
- '@jobs'
|
||||
rest_cherrypy:
|
||||
port: 8123
|
||||
disable_ssl: True
|
||||
max_request_body_size: 30485760
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${cherrypy_config}" > /etc/salt/master.d/rmm-salt.conf
|
||||
|
||||
supervisor_config="$(cat << EOF
|
||||
[supervisord]
|
||||
nodaemon=true
|
||||
[include]
|
||||
files = /etc/supervisor/conf.d/*.conf
|
||||
|
||||
[program:salt-master]
|
||||
command=/bin/bash -c "salt-master -l info"
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
|
||||
[program:salt-api]
|
||||
command=/bin/bash -c "salt-api -l info"
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${supervisor_config}" > /etc/supervisor/conf.d/supervisor.conf
|
||||
|
||||
# run salt and salt master
|
||||
/usr/bin/supervisord
|
||||
@@ -38,7 +38,6 @@ ENV PATH "${VIRTUAL_ENV}/bin:${TACTICAL_GO_DIR}/go/bin:$PATH"
# copy files from repo
COPY api/tacticalrmm ${TACTICAL_TMP_DIR}/api
COPY scripts ${TACTICAL_TMP_DIR}/scripts
COPY _modules ${TACTICAL_TMP_DIR}/_modules

# copy go install from build stage
COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
@@ -9,8 +9,6 @@ set -e
|
||||
: "${POSTGRES_USER:=tactical}"
|
||||
: "${POSTGRES_PASS:=tactical}"
|
||||
: "${POSTGRES_DB:=tacticalrmm}"
|
||||
: "${SALT_HOST:=tactical-salt}"
|
||||
: "${SALT_USER:=saltapi}"
|
||||
: "${MESH_CONTAINER:=tactical-meshcentral}"
|
||||
: "${MESH_USER:=meshcentral}"
|
||||
: "${MESH_PASS:=meshcentralpass}"
|
||||
@@ -53,14 +51,6 @@ if [ "$1" = 'tactical-init' ]; then
|
||||
MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
|
||||
ADMINURL=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 70 | head -n 1)
|
||||
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||
|
||||
# write salt pass to tmp dir
|
||||
if [ ! -f "${TACTICAL__DIR}/tmp/salt_pass" ]; then
|
||||
SALT_PASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
|
||||
echo "${SALT_PASS}" > ${TACTICAL_DIR}/tmp/salt_pass
|
||||
else
|
||||
SALT_PASS=$(cat ${TACTICAL_DIR}/tmp/salt_pass)
|
||||
fi
|
||||
|
||||
localvars="$(cat << EOF
|
||||
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||
@@ -111,9 +101,6 @@ if not DEBUG:
|
||||
)
|
||||
})
|
||||
|
||||
SALT_USERNAME = '${SALT_USER}'
|
||||
SALT_PASSWORD = '${SALT_PASS}'
|
||||
SALT_HOST = '${SALT_HOST}'
|
||||
MESH_USERNAME = '${MESH_USER}'
|
||||
MESH_SITE = 'https://${MESH_HOST}'
|
||||
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||
@@ -176,8 +163,3 @@ if [ "$1" = 'tactical-celerybeat' ]; then
|
||||
test -f "${TACTICAL_DIR}/api/celerybeat.pid" && rm "${TACTICAL_DIR}/api/celerybeat.pid"
|
||||
celery -A tacticalrmm beat -l info
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-celerywinupdate' ]; then
|
||||
check_tactical_ready
|
||||
celery -A tacticalrmm worker -Q wupdate -l info
|
||||
fi
|
||||
|
||||
@@ -15,7 +15,6 @@ networks:
|
||||
# docker managed persistent volumes
|
||||
volumes:
|
||||
tactical_data:
|
||||
salt_data:
|
||||
postgres_data:
|
||||
mongo_data:
|
||||
mesh_data:
|
||||
@@ -63,19 +62,6 @@ services:
|
||||
- proxy
|
||||
volumes:
|
||||
- tactical_data:/opt/tactical
|
||||
|
||||
# salt master and api
|
||||
tactical-salt:
|
||||
image: ${IMAGE_REPO}tactical-salt:${VERSION}
|
||||
restart: always
|
||||
ports:
|
||||
- "4505:4505"
|
||||
- "4506:4506"
|
||||
volumes:
|
||||
- tactical_data:/opt/tactical
|
||||
- salt_data:/etc/salt
|
||||
networks:
|
||||
- proxy
|
||||
|
||||
# nats
|
||||
tactical-nats:
|
||||
@@ -141,11 +127,9 @@ services:
|
||||
command: ["tactical-backend"]
|
||||
restart: always
|
||||
networks:
|
||||
proxy:
|
||||
aliases:
|
||||
- tactical-backend
|
||||
api-db:
|
||||
redis:
|
||||
- proxy
|
||||
- api-db
|
||||
- redis
|
||||
volumes:
|
||||
- tactical_data:/opt/tactical
|
||||
depends_on:
|
||||
@@ -199,18 +183,3 @@ services:
|
||||
depends_on:
|
||||
- tactical-postgres
|
||||
- tactical-redis
|
||||
|
||||
# container for celery winupdate tasks
|
||||
tactical-celerywinupdate:
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-celerywinupdate"]
|
||||
restart: always
|
||||
networks:
|
||||
- redis
|
||||
- proxy
|
||||
- api-db
|
||||
volumes:
|
||||
- tactical_data:/opt/tactical
|
||||
depends_on:
|
||||
- tactical-postgres
|
||||
- tactical-redis
|
||||
|
||||
@@ -3,7 +3,7 @@
set -o errexit
set -o pipefail

DOCKER_IMAGES="tactical-nginx"
DOCKER_IMAGES="tactical tactical-frontend tactical-nats tactical-nginx tactical-meshcentral"

cd ..
@@ -18,7 +18,7 @@ sudo certbot certonly --manual -d *.example.com --agree-tos --no-bootstrap --man

## Configure DNS and firewall

You will need to add DNS entries so that the three subdomains resolve to the IP of the docker host. There is a reverse proxy running that will route the hostnames to the correct container. On the host, you will need to ensure the firewall is open on tcp ports 80, 443, 4222, 4505, 4506.
You will need to add DNS entries so that the three subdomains resolve to the IP of the docker host. There is a reverse proxy running that will route the hostnames to the correct container. On the host, you will need to ensure the firewall is open on tcp ports 80, 443 and 4222.
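For illustration only (not part of the diff above): a minimal sketch of opening those ports on the docker host, assuming ufw is the firewall in use.

```bash
# assumes ufw manages the host firewall; adapt for firewalld/iptables if needed
sudo ufw allow 80/tcp
sudo ufw allow 443/tcp
sudo ufw allow 4222/tcp
sudo ufw reload
```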

## Setting up the environment

@@ -1,12 +0,0 @@
pids
logs
node_modules
npm-debug.log
coverage/
run
dist
.DS_Store
.nyc_output
.basement
config.local.js
basement_dist
@@ -1,41 +0,0 @@
|
||||
const { description } = require('../package')
|
||||
|
||||
module.exports = {
|
||||
base: '/tacticalrmm/',
|
||||
title: 'Tactical RMM',
|
||||
description: description,
|
||||
|
||||
head: [
|
||||
['meta', { name: 'theme-color', content: '#3eaf7c' }],
|
||||
['meta', { name: 'apple-mobile-web-app-capable', content: 'yes' }],
|
||||
['meta', { name: 'apple-mobile-web-app-status-bar-style', content: 'black' }]
|
||||
],
|
||||
themeConfig: {
|
||||
repo: '',
|
||||
editLinks: false,
|
||||
docsDir: '',
|
||||
editLinkText: '',
|
||||
lastUpdated: false,
|
||||
nav: [
|
||||
{
|
||||
text: 'Guide',
|
||||
link: '/guide/',
|
||||
}
|
||||
],
|
||||
sidebar: {
|
||||
'/guide/': [
|
||||
{
|
||||
title: 'Guide',
|
||||
collapsable: false,
|
||||
children: [
|
||||
'',
|
||||
]
|
||||
}
|
||||
],
|
||||
}
|
||||
},
|
||||
plugins: [
|
||||
//'@vuepress/plugin-back-to-top',
|
||||
//'@vuepress/plugin-medium-zoom',
|
||||
]
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
/**
|
||||
* Client app enhancement file.
|
||||
*
|
||||
* https://v1.vuepress.vuejs.org/guide/basic-config.html#app-level-enhancements
|
||||
*/
|
||||
|
||||
export default ({
|
||||
Vue, // the version of Vue being used in the VuePress app
|
||||
options, // the options for the root Vue instance
|
||||
router, // the router instance for the app
|
||||
siteData // site metadata
|
||||
}) => {
|
||||
// ...apply enhancements for the site.
|
||||
}
|
||||
@@ -1,8 +0,0 @@
/**
 * Custom Styles here.
 *
 * ref:https://v1.vuepress.vuejs.org/config/#index-styl
 */

.home .hero img
  max-width 450px!important
@@ -1,10 +0,0 @@
/**
 * Custom palette here.
 *
 * ref:https://v1.vuepress.vuejs.org/zh/config/#palette-styl
 */

$accentColor = #3eaf7c
$textColor = #2c3e50
$borderColor = #eaecef
$codeBgColor = #282c34
BIN docs/docs/images/favicon.ico Normal file (binary file not shown, 758 B)
BIN docs/docs/images/onit.ico Normal file (binary file not shown, 48 KiB)
28 docs/docs/index.md Normal file
@@ -0,0 +1,28 @@
# Tactical RMM Documentation

[Build Status](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
[Coverage Status](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
[License: MIT](https://opensource.org/licenses/MIT)
[Code style: black](https://github.com/python/black)

Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django, Vue and Golang.
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral).

## [LIVE DEMO](https://rmm.xlawgaming.com/)
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.

*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*

## Features

- Teamviewer-like remote desktop control
- Real-time remote shell
- Remote file browser (download and upload files)
- Remote command and script execution (batch, powershell and python scripts)
- Event log viewer
- Services management
- Windows patch management
- Automated checks with email/SMS alerting (cpu, disk, memory, services, scripts, event logs)
- Automated task runner (run scripts on a schedule)
- Remote software installation via chocolatey
- Software and hardware inventory
10 docs/docs/stylesheets/extra.css Normal file
@@ -0,0 +1,10 @@
.md-header {
    background-color: black !important;
    color: white !important;
}
.md-search__input {
    background-color: white !important;
}
.md-search__icon[for=__search]{
    color: initial;
}
@@ -1,2 +0,0 @@
# Installation

@@ -1,6 +0,0 @@
---
home: true
heroImage: https://v1.vuepress.vuejs.org/hero.png
actionText: Documentation →
actionLink: /guide/
---
@@ -238,12 +238,18 @@ sudo rm -f /var/log/celery/*
sudo nginx -t
```

10. Start services
10. Edit `/etc/hosts` and make sure the line starting with 127.0.1.1 or 127.0.0.1 has your 3 subdomains in it like this:
```bash
127.0.0.1 localhost
127.0.1.1 yourservername api.example.com rmm.example.com mesh.example.com
```

11. Start services
```bash
for i in rmm celery celerybeat celery-winupdate salt-master salt-api nginx meshcentral; do sudo systemctl start $i; done
```

11. Delete whatever `update.sh` script you currently have and download the latest one and run it
12. Delete whatever `update.sh` script you currently have and download the latest one and run it
```bash
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
chmod +x update.sh
```
33 docs/mkdocs.yml Normal file
@@ -0,0 +1,33 @@
site_name: "Tactical RMM"
nav:
  - Home: index.md
site_description: "A remote monitoring and management tool for Windows computers"
site_author: "wh1te909"

# Repository
repo_name: "wh1te909/tacticalrmm"
repo_url: "https://github.com/wh1te909/tacticalrmm"
edit_uri: ""

theme:
  name: "material"
  custom_dir: "theme"
  logo: "images/onit.ico"
  favicon: "images/favicon.ico"
  language: "en"
  palette:
    primary: "white"
    accent: "indigo"
extra_css:
  - stylesheets/extra.css
extra:
  social:
    - icon: fontawesome/brands/github
      link: "https://github.com/wh1te909/tacticalrmm"
markdown_extensions:
  - pymdownx.inlinehilite
  - admonition
  - codehilite:
      guess_lang: false
  - toc:
      permalink: true
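For illustration only (not part of this commit): a minimal sketch of previewing these docs locally, assuming the mkdocs-material theme referenced by the config above is installed from PyPI.

```bash
# assumes Python 3 / pip are available; run from the docs/ directory that contains mkdocs.yml
pip install mkdocs mkdocs-material pymdown-extensions
mkdocs serve   # live preview at http://127.0.0.1:8000
mkdocs build   # writes the static site to the site/ directory
```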
10783 docs/package-lock.json generated (file diff suppressed because it is too large)
@@ -1,14 +0,0 @@
{
  "name": "tacticalrmm",
  "description": "A remote monitoring and management tool",
  "private": true,
  "version": "0.0.1",
  "scripts": {
    "dev": "vuepress dev",
    "build": "vuepress build"
  },
  "license": "MIT",
  "devDependencies": {
    "vuepress": "^1.5.3"
  }
}
4 docs/theme/main.html vendored Normal file
@@ -0,0 +1,4 @@
{% extends "base.html" %}
{% block site_nav %}
{{ super() }}
{% endblock %}
70 docs/theme/partials/footer.html vendored Normal file
@@ -0,0 +1,70 @@
|
||||
{% import "partials/language.html" as lang with context %}
|
||||
|
||||
<!-- Application footer -->
|
||||
<footer class="md-footer">
|
||||
|
||||
<!-- Link to previous and/or next page -->
|
||||
{% if page.previous_page or page.next_page %}
|
||||
<div class="md-footer-nav">
|
||||
<nav class="md-footer-nav__inner md-grid">
|
||||
|
||||
<!-- Link to previous page -->
|
||||
{% if page.previous_page %}
|
||||
<a href="{{ page.previous_page.url | url }}" title="{{ page.previous_page.title }}"
|
||||
class="md-flex md-footer-nav__link md-footer-nav__link--prev" rel="prev">
|
||||
<div class="md-flex__cell md-flex__cell--shrink">
|
||||
<i class="md-icon md-icon--arrow-back
|
||||
md-footer-nav__button"></i>
|
||||
</div>
|
||||
<div class="md-flex__cell md-flex__cell--stretch
|
||||
md-footer-nav__title">
|
||||
<span class="md-flex__ellipsis">
|
||||
<span class="md-footer-nav__direction">
|
||||
{{ lang.t("footer.previous") }}
|
||||
</span>
|
||||
{{ page.previous_page.title }}
|
||||
</span>
|
||||
</div>
|
||||
</a>
|
||||
{% endif %}
|
||||
|
||||
<!-- Link to next page -->
|
||||
{% if page.next_page %}
|
||||
<a href="{{ page.next_page.url | url }}" title="{{ page.next_page.title }}"
|
||||
class="md-flex md-footer-nav__link md-footer-nav__link--next" rel="next">
|
||||
<div class="md-flex__cell md-flex__cell--stretch
|
||||
md-footer-nav__title">
|
||||
<span class="md-flex__ellipsis">
|
||||
<span class="md-footer-nav__direction">
|
||||
{{ lang.t("footer.next") }}
|
||||
</span>
|
||||
{{ page.next_page.title }}
|
||||
</span>
|
||||
</div>
|
||||
<div class="md-flex__cell md-flex__cell--shrink">
|
||||
<i class="md-icon md-icon--arrow-forward
|
||||
md-footer-nav__button"></i>
|
||||
</div>
|
||||
</a>
|
||||
{% endif %}
|
||||
</nav>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- Further information -->
|
||||
<div class="md-footer-meta md-typeset">
|
||||
<div class="md-footer-meta__inner md-grid">
|
||||
|
||||
<!-- Copyright and theme information -->
|
||||
<div class="md-footer-copyright">
|
||||
{% if config.copyright %}
|
||||
<div class="md-footer-copyright__highlight">
|
||||
{{ config.copyright }}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<!-- Social links -->
|
||||
{% include "partials/social.html" %}
|
||||
</div>
|
||||
</div>
|
||||
</footer>
|
||||
7 go.mod
@@ -4,14 +4,13 @@ go 1.15

require (
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/go-resty/resty/v2 v2.3.0
    github.com/go-resty/resty/v2 v2.4.0
    github.com/josephspurrier/goversioninfo v1.2.0
    github.com/kr/pretty v0.1.0 // indirect
    github.com/nats-io/nats.go v1.10.1-0.20210107160453-a133396829fc
    github.com/ugorji/go/codec v1.2.2
    github.com/wh1te909/rmmagent v1.2.0
    golang.org/x/net v0.0.0-20201031054903-ff519b6c9102 // indirect
    golang.org/x/sys v0.0.0-20201113233024-12cec1faf1ba // indirect
    github.com/wh1te909/rmmagent v1.2.2-0.20210121224121-abcecefe6da5
    golang.org/x/net v0.0.0-20210119194325-5f4716e94777 // indirect
    golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
    gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect
)
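For illustration only (not part of this diff): dependency bumps like the resty and rmmagent changes above are typically produced with the standard Go toolchain rather than by hand, e.g.:

```bash
# run from the repo root (where go.mod lives); versions/commits taken from the diff above
go get github.com/go-resty/resty/v2@v2.4.0
go get github.com/wh1te909/rmmagent@abcecefe6da5
go mod tidy
```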
17 go.sum
@@ -11,6 +11,8 @@ github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMo
|
||||
github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM=
|
||||
github.com/go-resty/resty/v2 v2.3.0 h1:JOOeAvjSlapTT92p8xiS19Zxev1neGikoHsXJeOq8So=
|
||||
github.com/go-resty/resty/v2 v2.3.0/go.mod h1:UpN9CgLZNsv4e9XG50UU8xdI0F43UQ4HmxLBDwaroHU=
|
||||
github.com/go-resty/resty/v2 v2.4.0 h1:s6TItTLejEI+2mn98oijC5w/Rk2YU+OA6x0mnZN6r6k=
|
||||
github.com/go-resty/resty/v2 v2.4.0/go.mod h1:B88+xCTEwvfD94NOuE6GS1wMlnoKNY8eEiNizfNwOwA=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
|
||||
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
|
||||
@@ -83,6 +85,7 @@ github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/tc-hib/goversioninfo v0.0.0-20200813185747-90ffbaa484a7/go.mod h1:NaPIGx19A2KXQEoek0x88NbM0lNgRooZS0xmrETzcjI=
|
||||
github.com/tc-hib/rsrc v0.9.1/go.mod h1:JGDB/TLOdMTvEEvjv3yetUTFnjXWYLbZDDeH4BTXG/8=
|
||||
github.com/tc-hib/rsrc v0.9.2/go.mod h1:vUZqBwu0vX+ueZH/D5wEvihBZfON5BrWCg6Orbfq7A4=
|
||||
github.com/ugorji/go v1.2.0/go.mod h1:1ny++pKMXhLWrwWV5Nf+CbOuZJhMoaFD+0GMFfd8fEc=
|
||||
github.com/ugorji/go v1.2.2 h1:60ZHIOcsJlo3bJm9CbTVu7OSqT2mxaEmyQbK2NwCkn0=
|
||||
github.com/ugorji/go v1.2.2/go.mod h1:bitgyERdV7L7Db/Z5gfd5v2NQMNhhiFiZwpgMw2SP7k=
|
||||
@@ -98,6 +101,14 @@ github.com/wh1te909/rmmagent v1.1.13-0.20210112033642-9b310c2c7f53 h1:Q47sibbW09
|
||||
github.com/wh1te909/rmmagent v1.1.13-0.20210112033642-9b310c2c7f53/go.mod h1:05MQOAiC/kGvJjDlCOjaTsMNpf6wZFqOTkHqK0ATfW0=
|
||||
github.com/wh1te909/rmmagent v1.2.0 h1:dM/juD7k6Oa0lEKsvbNPgjc1wVC6uQtNzQoIqVuuxSQ=
|
||||
github.com/wh1te909/rmmagent v1.2.0/go.mod h1:05MQOAiC/kGvJjDlCOjaTsMNpf6wZFqOTkHqK0ATfW0=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210118235958-bd6606570a6f h1:lhcD2yJauZ8TyYCxYvSv/CPnUhiTrxwydPTESfPkyuc=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210118235958-bd6606570a6f/go.mod h1:05MQOAiC/kGvJjDlCOjaTsMNpf6wZFqOTkHqK0ATfW0=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210119030741-08ec2f919198 h1:lPxk5AEr/2y8txGtvbQgW0rofZ7RFaJBYmS8rLIxoVQ=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210119030741-08ec2f919198/go.mod h1:05MQOAiC/kGvJjDlCOjaTsMNpf6wZFqOTkHqK0ATfW0=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210119225811-d3b8795ce1d7 h1:ctMUmZtlI2dH1WCndTFPOueWgYd18n+onYsnMKT/lns=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210119225811-d3b8795ce1d7/go.mod h1:TG09pCLQZcN5jyrokVty3eHImponjh5nMmifru9RPeY=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210121224121-abcecefe6da5 h1:md2uqZE2Too7mRvWCvA7vDpdpFP1bMEKWAfrIa0ARiA=
|
||||
github.com/wh1te909/rmmagent v1.2.2-0.20210121224121-abcecefe6da5/go.mod h1:TG09pCLQZcN5jyrokVty3eHImponjh5nMmifru9RPeY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
@@ -111,6 +122,10 @@ golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/
|
||||
golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102 h1:42cLlJJdEh+ySyeUUbEQ5bsTiq8voBeTuweGVkY6Puw=
|
||||
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20201224014010-6772e930b67b h1:iFwSg7t5GZmB/Q5TjiEAsdoLDrdJRC1RiF2WhuV29Qw=
|
||||
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew=
|
||||
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -128,6 +143,8 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20201024232916-9f70ab9862d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201113233024-12cec1faf1ba h1:xmhUJGQGbxlod18iJGqVEp9cHIPLl7QiX2aA3to708s=
|
||||
golang.org/x/sys v0.0.0-20201113233024-12cec1faf1ba/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
|
||||
139 install.sh
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="33"
|
||||
SCRIPT_VERSION="34"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh'
|
||||
|
||||
sudo apt install -y curl wget
|
||||
@@ -76,11 +76,11 @@ postgresql_repo="deb [arch=amd64] https://apt.postgresql.org/pub/repos/apt/ $cod
|
||||
sudo systemctl restart systemd-journald.service
|
||||
|
||||
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||
SALTPW=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
|
||||
ADMINURL=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 70 | head -n 1)
|
||||
MESHPASSWD=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 25 | head -n 1)
|
||||
pgusername=$(cat /dev/urandom | tr -dc 'a-z' | fold -w 8 | head -n 1)
|
||||
pgpw=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
|
||||
meshusername=$(cat /dev/urandom | tr -dc 'a-z' | fold -w 8 | head -n 1)
|
||||
|
||||
cls() {
|
||||
printf "\033c"
|
||||
@@ -163,9 +163,6 @@ if echo "$IPV4" | grep -qE '^(10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|192
|
||||
BEHIND_NAT=true
|
||||
fi
|
||||
|
||||
echo -ne "${YELLOW}Create a username for meshcentral${NC}: "
|
||||
read meshusername
|
||||
|
||||
sudo apt install -y software-properties-common
|
||||
sudo apt update
|
||||
sudo apt install -y certbot openssl
|
||||
@@ -184,11 +181,6 @@ CERT_PUB_KEY=/etc/letsencrypt/live/${rootdomain}/fullchain.pem
|
||||
sudo chown ${USER}:${USER} -R /etc/letsencrypt
|
||||
sudo chmod 775 -R /etc/letsencrypt
|
||||
|
||||
print_green 'Creating saltapi user'
|
||||
|
||||
sudo adduser --no-create-home --disabled-password --gecos "" saltapi
|
||||
echo "saltapi:${SALTPW}" | sudo chpasswd
|
||||
|
||||
print_green 'Installing golang'
|
||||
|
||||
sudo mkdir -p /usr/local/rmmgo
|
||||
@@ -239,7 +231,7 @@ sudo systemctl restart mongod
|
||||
print_green 'Installing python, redis and git'
|
||||
|
||||
sudo apt update
|
||||
sudo apt install -y python3-venv python3-dev python3-pip python3-cherrypy3 python3-setuptools python3-wheel ca-certificates redis git
|
||||
sudo apt install -y python3-venv python3-dev python3-pip python3-setuptools python3-wheel ca-certificates redis git
|
||||
|
||||
print_green 'Installing postgresql'
|
||||
|
||||
@@ -359,12 +351,10 @@ if not DEBUG:
|
||||
)
|
||||
})
|
||||
|
||||
SALT_USERNAME = "saltapi"
|
||||
SALT_PASSWORD = "${SALTPW}"
|
||||
SALT_HOST = "127.0.0.1"
|
||||
MESH_USERNAME = "${meshusername}"
|
||||
MESH_SITE = "https://${meshdomain}"
|
||||
REDIS_HOST = "localhost"
|
||||
KEEP_SALT = False
|
||||
EOF
|
||||
)"
|
||||
echo "${localvars}" > /rmm/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||
@@ -531,18 +521,6 @@ server {
|
||||
alias /rmm/api/tacticalrmm/tacticalrmm/private/;
|
||||
}
|
||||
|
||||
location /saltscripts/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${frontenddomain}";
|
||||
alias /srv/salt/scripts/userdefined/;
|
||||
}
|
||||
|
||||
location /builtin/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://${frontenddomain}";
|
||||
alias /srv/salt/scripts/;
|
||||
}
|
||||
|
||||
location ~ ^/(natsapi) {
|
||||
allow 127.0.0.1;
|
||||
deny all;
|
||||
@@ -602,46 +580,6 @@ echo "${nginxmesh}" | sudo tee /etc/nginx/sites-available/meshcentral.conf > /de
|
||||
sudo ln -s /etc/nginx/sites-available/rmm.conf /etc/nginx/sites-enabled/rmm.conf
|
||||
sudo ln -s /etc/nginx/sites-available/meshcentral.conf /etc/nginx/sites-enabled/meshcentral.conf
|
||||
|
||||
print_green 'Installing Salt Master'
|
||||
wget -O - 'https://repo.saltstack.com/py3/'$osname'/'$fullrelno'/amd64/latest/SALTSTACK-GPG-KEY.pub' | sudo apt-key add -
|
||||
echo 'deb http://repo.saltstack.com/py3/'$osname'/'$fullrelno'/amd64/latest '$codename' main' | sudo tee /etc/apt/sources.list.d/saltstack.list
|
||||
|
||||
sudo apt update
|
||||
sudo apt install -y salt-master
|
||||
|
||||
print_green 'Waiting 10 seconds for salt to start'
|
||||
sleep 10
|
||||
|
||||
saltvars="$(cat << EOF
|
||||
timeout: 20
|
||||
gather_job_timeout: 25
|
||||
max_event_size: 30485760
|
||||
external_auth:
|
||||
pam:
|
||||
saltapi:
|
||||
- .*
|
||||
- '@runner'
|
||||
- '@wheel'
|
||||
- '@jobs'
|
||||
|
||||
rest_cherrypy:
|
||||
port: 8123
|
||||
disable_ssl: True
|
||||
max_request_body_size: 30485760
|
||||
|
||||
EOF
|
||||
)"
|
||||
echo "${saltvars}" | sudo tee /etc/salt/master.d/rmm-salt.conf > /dev/null
|
||||
|
||||
# fix the stupid 1 MB limit present in msgpack 0.6.2, which btw was later changed to 100 MB in msgpack 1.0.0
|
||||
# but 0.6.2 is the default on ubuntu 20
|
||||
sudo sed -i 's/msgpack_kwargs = {"raw": six.PY2}/msgpack_kwargs = {"raw": six.PY2, "max_buffer_size": 2147483647}/g' /usr/lib/python3/dist-packages/salt/transport/ipc.py
|
||||
|
||||
|
||||
|
||||
print_green 'Installing Salt API'
|
||||
sudo apt install -y salt-api
|
||||
|
||||
sudo mkdir /etc/conf.d
|
||||
|
||||
celeryservice="$(cat << EOF
|
||||
@@ -676,7 +614,7 @@ CELERY_APP="tacticalrmm"
|
||||
|
||||
CELERYD_MULTI="multi"
|
||||
|
||||
CELERYD_OPTS="--time-limit=2900 --autoscale=50,5"
|
||||
CELERYD_OPTS="--time-limit=9999 --autoscale=100,5"
|
||||
|
||||
CELERYD_PID_FILE="/rmm/api/tacticalrmm/%n.pid"
|
||||
CELERYD_LOG_FILE="/var/log/celery/%n%I.log"
|
||||
@@ -688,44 +626,6 @@ EOF
|
||||
)"
|
||||
echo "${celeryconf}" | sudo tee /etc/conf.d/celery.conf > /dev/null
|
||||
|
||||
celerywinupdatesvc="$(cat << EOF
|
||||
[Unit]
|
||||
Description=Celery WinUpdate Service V2
|
||||
After=network.target redis-server.service postgresql.service
|
||||
|
||||
[Service]
|
||||
Type=forking
|
||||
User=${USER}
|
||||
Group=${USER}
|
||||
EnvironmentFile=/etc/conf.d/celery-winupdate.conf
|
||||
WorkingDirectory=/rmm/api/tacticalrmm
|
||||
ExecStart=/bin/sh -c '\${CELERY_BIN} -A \$CELERY_APP multi start \$CELERYD_NODES --pidfile=\${CELERYD_PID_FILE} --logfile=\${CELERYD_LOG_FILE} --loglevel="\${CELERYD_LOG_LEVEL}" -Q wupdate \$CELERYD_OPTS'
|
||||
ExecStop=/bin/sh -c '\${CELERY_BIN} multi stopwait \$CELERYD_NODES --pidfile=\${CELERYD_PID_FILE} --loglevel="\${CELERYD_LOG_LEVEL}"'
|
||||
ExecReload=/bin/sh -c '\${CELERY_BIN} -A \$CELERY_APP multi restart \$CELERYD_NODES --pidfile=\${CELERYD_PID_FILE} --logfile=\${CELERYD_LOG_FILE} --loglevel="\${CELERYD_LOG_LEVEL}" -Q wupdate \$CELERYD_OPTS'
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
)"
|
||||
echo "${celerywinupdatesvc}" | sudo tee /etc/systemd/system/celery-winupdate.service > /dev/null
|
||||
|
||||
celerywinupdate="$(cat << EOF
|
||||
CELERYD_NODES="w2"
|
||||
|
||||
CELERY_BIN="/rmm/api/env/bin/celery"
|
||||
CELERY_APP="tacticalrmm"
|
||||
CELERYD_MULTI="multi"
|
||||
|
||||
CELERYD_OPTS="--time-limit=4000 --autoscale=40,1"
|
||||
|
||||
CELERYD_PID_FILE="/rmm/api/tacticalrmm/%n.pid"
|
||||
CELERYD_LOG_FILE="/var/log/celery/%n%I.log"
|
||||
CELERYD_LOG_LEVEL="ERROR"
|
||||
EOF
|
||||
)"
|
||||
echo "${celerywinupdate}" | sudo tee /etc/conf.d/celery-winupdate.conf > /dev/null
|
||||
|
||||
celerybeatservice="$(cat << EOF
|
||||
[Unit]
|
||||
@@ -748,21 +648,12 @@ EOF
|
||||
)"
|
||||
echo "${celerybeatservice}" | sudo tee /etc/systemd/system/celerybeat.service > /dev/null
|
||||
|
||||
sudo mkdir -p /srv/salt
|
||||
sudo cp -r /rmm/_modules /srv/salt/
|
||||
sudo cp -r /rmm/scripts /srv/salt/
|
||||
sudo mkdir /srv/salt/scripts/userdefined
|
||||
sudo chown ${USER}:${USER} -R /srv/salt/
|
||||
sudo chown ${USER}:www-data /srv/salt/scripts/userdefined
|
||||
sudo chmod 750 /srv/salt/scripts/userdefined
|
||||
sudo chown ${USER}:${USER} -R /etc/conf.d/
|
||||
|
||||
meshservice="$(cat << EOF
|
||||
[Unit]
|
||||
Description=MeshCentral Server
|
||||
After=network.target
|
||||
After=mongod.service
|
||||
After=nginx.service
|
||||
After=network.target mongod.service nginx.service
|
||||
[Service]
|
||||
Type=simple
|
||||
LimitNOFILE=1000000
|
||||
@@ -782,12 +673,6 @@ echo "${meshservice}" | sudo tee /etc/systemd/system/meshcentral.service > /dev/
|
||||
|
||||
sudo systemctl daemon-reload
|
||||
|
||||
|
||||
sudo systemctl enable salt-master
|
||||
sudo systemctl enable salt-api
|
||||
|
||||
sudo systemctl restart salt-api
|
||||
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.npm
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.config
|
||||
|
||||
@@ -844,7 +729,7 @@ sudo ln -s /etc/nginx/sites-available/frontend.conf /etc/nginx/sites-enabled/fro
|
||||
|
||||
print_green 'Enabling Services'
|
||||
|
||||
for i in rmm.service celery.service celerybeat.service celery-winupdate.service nginx
|
||||
for i in rmm.service celery.service celerybeat.service nginx
|
||||
do
|
||||
sudo systemctl enable ${i}
|
||||
sudo systemctl stop ${i}
|
||||
@@ -912,17 +797,12 @@ sudo systemctl start nats.service
|
||||
|
||||
|
||||
print_green 'Restarting services'
|
||||
for i in rmm.service celery.service celerybeat.service celery-winupdate.service natsapi.service
|
||||
for i in rmm.service celery.service celerybeat.service natsapi.service
|
||||
do
|
||||
sudo systemctl stop ${i}
|
||||
sudo systemctl start ${i}
|
||||
done
|
||||
|
||||
print_green 'Restarting salt-master and waiting 10 seconds'
|
||||
sudo systemctl restart salt-master
|
||||
sleep 10
|
||||
sudo systemctl restart salt-api
|
||||
|
||||
printf >&2 "${YELLOW}%0.s*${NC}" {1..80}
|
||||
printf >&2 "\n\n"
|
||||
printf >&2 "${YELLOW}Installation complete!${NC}\n\n"
|
||||
@@ -931,6 +811,7 @@ echo ${MESHEXE} | sed 's/{.*}//'
|
||||
printf >&2 "${NC}\n\n"
|
||||
printf >&2 "${YELLOW}Access your rmm at: ${GREEN}https://${frontenddomain}${NC}\n\n"
|
||||
printf >&2 "${YELLOW}Django admin url: ${GREEN}https://${rmmdomain}/${ADMINURL}${NC}\n\n"
|
||||
printf >&2 "${YELLOW}MeshCentral username: ${GREEN}${meshusername}${NC}\n"
|
||||
printf >&2 "${YELLOW}MeshCentral password: ${GREEN}${MESHPASSWD}${NC}\n\n"
|
||||
|
||||
if [ "$BEHIND_NAT" = true ]; then
|
||||
@@ -938,7 +819,7 @@ if [ "$BEHIND_NAT" = true ]; then
|
||||
echo -ne "${GREEN}If you will be accessing the web interface of the RMM from the same LAN as this server,${NC}\n"
|
||||
echo -ne "${GREEN}you'll need to make sure your 3 subdomains resolve to ${IPV4}${NC}\n"
|
||||
echo -ne "${GREEN}This also applies to any agents that will be on the same local network as the rmm.${NC}\n"
|
||||
echo -ne "${GREEN}You'll also need to setup port forwarding in your router on ports 80, 443, 4505, 4506 and 4222 tcp.${NC}\n\n"
|
||||
echo -ne "${GREEN}You'll also need to setup port forwarding in your router on ports 80, 443 and 4222 tcp.${NC}\n\n"
|
||||
fi
|
||||
|
||||
printf >&2 "${YELLOW}Please refer to the github README for next steps${NC}\n\n"
|
||||
|
||||
4 main.go
@@ -9,7 +9,7 @@ import (
    "github.com/wh1te909/tacticalrmm/natsapi"
)

var version = "1.0.0"
var version = "1.0.2"

func main() {
    ver := flag.Bool("version", false, "Prints version")
@@ -23,5 +23,5 @@ func main() {
        return
    }

    api.Listen(*apiHost, *natsHost, *debug)
    api.Listen(*apiHost, *natsHost, version, *debug)
}
@@ -40,16 +40,15 @@ func getAPI(apihost, natshost string) (string, string, error) {
    return "", "", errors.New("unable to parse api from nginx conf")
}

func Listen(apihost, natshost string, debug bool) {
func Listen(apihost, natshost, version string, debug bool) {
    api, natsurl, err := getAPI(apihost, natshost)
    if err != nil {
        log.Fatalln(err)
    }

    if debug {
        log.Println("Api base url: ", api)
        log.Println("Nats connection url: ", natsurl)
    }
    log.Printf("Tactical Nats API Version %s\n", version)
    log.Println("Api base url: ", api)
    log.Println("Nats connection url: ", natsurl)

    rClient.SetHostURL(api)
    rClient.SetTimeout(30 * time.Second)
@@ -89,6 +88,13 @@ func Listen(apihost, natshost string, debug bool) {
|
||||
rClient.R().SetBody(p).Patch("/checkin/")
|
||||
}
|
||||
}()
|
||||
case "startup":
|
||||
go func() {
|
||||
var p *rmm.CheckIn
|
||||
if err := dec.Decode(&p); err == nil {
|
||||
rClient.R().SetBody(p).Post("/checkin/")
|
||||
}
|
||||
}()
|
||||
case "osinfo":
|
||||
go func() {
|
||||
var p *rmm.CheckInOS
|
||||
@@ -138,6 +144,34 @@ func Listen(apihost, natshost string, debug bool) {
|
||||
rClient.R().SetBody(p).Post("/syncmesh/")
|
||||
}
|
||||
}()
|
||||
case "getwinupdates":
|
||||
go func() {
|
||||
var p *rmm.WinUpdateResult
|
||||
if err := dec.Decode(&p); err == nil {
|
||||
rClient.R().SetBody(p).Post("/winupdates/")
|
||||
}
|
||||
}()
|
||||
case "winupdateresult":
|
||||
go func() {
|
||||
var p *rmm.WinUpdateInstallResult
|
||||
if err := dec.Decode(&p); err == nil {
|
||||
rClient.R().SetBody(p).Patch("/winupdates/")
|
||||
}
|
||||
}()
|
||||
case "needsreboot":
|
||||
go func() {
|
||||
var p *rmm.AgentNeedsReboot
|
||||
if err := dec.Decode(&p); err == nil {
|
||||
rClient.R().SetBody(p).Put("/winupdates/")
|
||||
}
|
||||
}()
|
||||
case "chocoinstall":
|
||||
go func() {
|
||||
var p *rmm.ChocoInstalled
|
||||
if err := dec.Decode(&p); err == nil {
|
||||
rClient.R().SetBody(p).Post("/choco/")
|
||||
}
|
||||
}()
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
Binary file not shown.
54 restore.sh
@@ -7,7 +7,7 @@ pgpw="hunter2"
|
||||
|
||||
#####################################################
|
||||
|
||||
SCRIPT_VERSION="12"
|
||||
SCRIPT_VERSION="13"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh'
|
||||
|
||||
sudo apt install -y curl wget
|
||||
@@ -166,15 +166,9 @@ print_green 'Restoring systemd services'
|
||||
sudo cp $tmp_dir/systemd/* /etc/systemd/system/
|
||||
sudo systemctl daemon-reload
|
||||
|
||||
print_green 'Restoring saltapi user'
|
||||
|
||||
SALTPW=$(grep SALT_PASSWORD $tmp_dir/rmm/local_settings.py | tr -d " \t" | sed 's/.*=//' | tr -d '"')
|
||||
sudo adduser --no-create-home --disabled-password --gecos "" saltapi
|
||||
echo "saltapi:${SALTPW}" | sudo chpasswd
|
||||
|
||||
print_green 'Installing python, redis and git'
|
||||
|
||||
sudo apt install -y python3.8-venv python3.8-dev python3-pip python3-cherrypy3 python3-setuptools python3-wheel ca-certificates redis git
|
||||
sudo apt install -y python3-venv python3-dev python3-pip python3-setuptools python3-wheel ca-certificates redis git
|
||||
|
||||
print_green 'Installing postgresql'
|
||||
|
||||
@@ -261,40 +255,6 @@ deactivate
|
||||
sudo systemctl enable nats.service
|
||||
sudo systemctl start nats.service
|
||||
|
||||
print_green 'Installing Salt Master'
|
||||
|
||||
wget -O - https://repo.saltstack.com/py3/ubuntu/20.04/amd64/latest/SALTSTACK-GPG-KEY.pub | sudo apt-key add -
|
||||
echo 'deb http://repo.saltstack.com/py3/ubuntu/20.04/amd64/latest focal main' | sudo tee /etc/apt/sources.list.d/saltstack.list
|
||||
|
||||
sudo apt update
|
||||
sudo apt install -y salt-master
|
||||
|
||||
print_green 'Waiting 10 seconds for salt to start'
|
||||
sleep 10
|
||||
|
||||
print_green 'Installing Salt API'
|
||||
sudo apt install -y salt-api
|
||||
|
||||
sudo sed -i 's/msgpack_kwargs = {"raw": six.PY2}/msgpack_kwargs = {"raw": six.PY2, "max_buffer_size": 2147483647}/g' /usr/lib/python3/dist-packages/salt/transport/ipc.py
|
||||
|
||||
sudo systemctl enable salt-master
|
||||
sudo systemctl enable salt-api
|
||||
sudo systemctl restart salt-api
|
||||
sleep 3
|
||||
|
||||
print_green 'Restoring salt keys'
|
||||
|
||||
sudo systemctl stop salt-master
|
||||
sudo systemctl stop salt-api
|
||||
sudo rm -rf /etc/salt
|
||||
sudo mkdir /etc/salt
|
||||
sudo tar -xzf $tmp_dir/salt/etc-salt.tar.gz -C /etc/salt
|
||||
sudo mkdir -p /srv/salt
|
||||
sudo tar -xzf $tmp_dir/salt/srv-salt.tar.gz -C /srv/salt
|
||||
sudo chown ${USER}:${USER} -R /srv/salt/
|
||||
sudo chown ${USER}:www-data /srv/salt/scripts/userdefined
|
||||
sudo chmod 750 /srv/salt/scripts/userdefined
|
||||
|
||||
print_green 'Restoring the frontend'
|
||||
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.npm
|
||||
@@ -310,18 +270,15 @@ sudo chown www-data:www-data -R /var/www/rmm/dist
|
||||
# reset perms
|
||||
sudo chown ${USER}:${USER} -R /rmm
|
||||
sudo chown ${USER}:${USER} /var/log/celery
|
||||
sudo chown ${USER}:${USER} -R /srv/salt/
|
||||
sudo chown ${USER}:${USER} -R /etc/conf.d/
|
||||
sudo chown ${USER}:www-data /srv/salt/scripts/userdefined
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.npm
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.config
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.cache
|
||||
sudo chmod 750 /srv/salt/scripts/userdefined
|
||||
|
||||
print_green 'Enabling Services'
|
||||
sudo systemctl daemon-reload
|
||||
|
||||
for i in celery.service celerybeat.service celery-winupdate.service rmm.service nginx
|
||||
for i in celery.service celerybeat.service rmm.service nginx
|
||||
do
|
||||
sudo systemctl enable ${i}
|
||||
sudo systemctl stop ${i}
|
||||
@@ -337,11 +294,6 @@ print_green 'Starting natsapi'
|
||||
sudo systemctl enable natsapi.service
|
||||
sudo systemctl start natsapi.service
|
||||
|
||||
print_green 'Restarting salt and waiting 10 seconds'
|
||||
sudo systemctl restart salt-master
|
||||
sleep 10
|
||||
sudo systemctl restart salt-api
|
||||
|
||||
printf >&2 "${YELLOW}%0.s*${NC}" {1..80}
|
||||
printf >&2 "\n\n"
|
||||
printf >&2 "${YELLOW}Restore complete!${NC}\n\n"
|
||||
|
||||
111 update.sh
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="102"
|
||||
SCRIPT_VERSION="104"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh'
|
||||
LATEST_SETTINGS_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/api/tacticalrmm/tacticalrmm/settings.py'
|
||||
YELLOW='\033[1;33m'
|
||||
@@ -107,41 +107,6 @@ sudo systemctl enable celerybeat.service
|
||||
|
||||
fi
|
||||
|
||||
CHECK_CELERYWINUPDATE_V2=$(grep V2 /etc/systemd/system/celery-winupdate.service)
|
||||
if ! [[ $CHECK_CELERYWINUPDATE_V2 ]]; then
|
||||
printf >&2 "${GREEN}Updating celery-winupdate.service${NC}\n"
|
||||
sudo systemctl stop celery-winupdate.service
|
||||
sudo rm -f /etc/systemd/system/celery-winupdate.service
|
||||
|
||||
celerywinupdatesvc="$(cat << EOF
|
||||
[Unit]
|
||||
Description=Celery WinUpdate Service V2
|
||||
After=network.target redis-server.service postgresql.service
|
||||
|
||||
[Service]
|
||||
Type=forking
|
||||
User=${USER}
|
||||
Group=${USER}
|
||||
EnvironmentFile=/etc/conf.d/celery-winupdate.conf
|
||||
WorkingDirectory=/rmm/api/tacticalrmm
|
||||
ExecStart=/bin/sh -c '\${CELERY_BIN} -A \$CELERY_APP multi start \$CELERYD_NODES --pidfile=\${CELERYD_PID_FILE} --logfile=\${CELERYD_LOG_FILE} --loglevel="\${CELERYD_LOG_LEVEL}" -Q wupdate \$CELERYD_OPTS'
|
||||
ExecStop=/bin/sh -c '\${CELERY_BIN} multi stopwait \$CELERYD_NODES --pidfile=\${CELERYD_PID_FILE} --loglevel="\${CELERYD_LOG_LEVEL}"'
|
||||
ExecReload=/bin/sh -c '\${CELERY_BIN} -A \$CELERY_APP multi restart \$CELERYD_NODES --pidfile=\${CELERYD_PID_FILE} --logfile=\${CELERYD_LOG_FILE} --loglevel="\${CELERYD_LOG_LEVEL}" -Q wupdate \$CELERYD_OPTS'
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
)"
|
||||
echo "${celerywinupdatesvc}" | sudo tee /etc/systemd/system/celery-winupdate.service > /dev/null
|
||||
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable celery-winupdate.service
|
||||
|
||||
fi
|
||||
|
||||
|
||||
TMP_SETTINGS=$(mktemp -p "" "rmmsettings_XXXXXXXXXX")
|
||||
curl -s -L "${LATEST_SETTINGS_URL}" > ${TMP_SETTINGS}
|
||||
SETTINGS_FILE="/rmm/api/tacticalrmm/tacticalrmm/settings.py"
|
||||
@@ -158,7 +123,6 @@ fi
|
||||
LATEST_MESH_VER=$(grep "^MESH_VER" "$TMP_SETTINGS" | awk -F'[= "]' '{print $5}')
|
||||
LATEST_PIP_VER=$(grep "^PIP_VER" "$TMP_SETTINGS" | awk -F'[= "]' '{print $5}')
|
||||
LATEST_NPM_VER=$(grep "^NPM_VER" "$TMP_SETTINGS" | awk -F'[= "]' '{print $5}')
|
||||
LATEST_SALT_VER=$(grep "^SALT_MASTER_VER" "$TMP_SETTINGS" | awk -F'[= "]' '{print $5}')
|
||||
|
||||
CURRENT_PIP_VER=$(grep "^PIP_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
|
||||
CURRENT_NPM_VER=$(grep "^NPM_VER" "$SETTINGS_FILE" | awk -F'[= "]' '{print $5}')
|
||||
@@ -187,7 +151,15 @@ sudo systemctl daemon-reload
|
||||
sudo systemctl enable natsapi.service
|
||||
fi
|
||||
|
||||
for i in salt-master salt-api nginx nats natsapi rmm celery celerybeat celery-winupdate
|
||||
if [ -f /etc/systemd/system/celery-winupdate.service ]; then
|
||||
printf >&2 "${GREEN}Removing celery-winupdate.service${NC}\n"
|
||||
sudo systemctl stop celery-winupdate.service
|
||||
sudo systemctl disable celery-winupdate.service
|
||||
sudo rm -f /etc/systemd/system/celery-winupdate.service
|
||||
sudo systemctl daemon-reload
|
||||
fi
|
||||
|
||||
for i in nginx nats natsapi rmm celery celerybeat
|
||||
do
|
||||
printf >&2 "${GREEN}Stopping ${i} service...${NC}\n"
|
||||
sudo systemctl stop ${i}
|
||||
@@ -208,38 +180,47 @@ git reset --hard FETCH_HEAD
|
||||
git clean -df
|
||||
git pull
|
||||
|
||||
CHECK_SALT=$(sudo salt --version | grep ${LATEST_SALT_VER})
|
||||
if ! [[ $CHECK_SALT ]]; then
|
||||
printf >&2 "${GREEN}Updating salt${NC}\n"
|
||||
sudo apt update
|
||||
sudo apt install -y salt-master salt-api salt-common
|
||||
printf >&2 "${GREEN}Waiting for salt...${NC}\n"
|
||||
sleep 15
|
||||
sudo systemctl stop salt-master
|
||||
sudo systemctl stop salt-api
|
||||
printf >&2 "${GREEN}Fixing msgpack${NC}\n"
|
||||
sudo sed -i 's/msgpack_kwargs = {"raw": six.PY2}/msgpack_kwargs = {"raw": six.PY2, "max_buffer_size": 2147483647}/g' /usr/lib/python3/dist-packages/salt/transport/ipc.py
|
||||
sudo systemctl start salt-master
|
||||
printf >&2 "${GREEN}Waiting for salt...${NC}\n"
|
||||
sleep 15
|
||||
sudo systemctl start salt-api
|
||||
printf >&2 "${GREEN}Salt update finished${NC}\n"
|
||||
fi
|
||||
|
||||
sudo chown ${USER}:${USER} -R /rmm
|
||||
sudo chown ${USER}:${USER} /var/log/celery
|
||||
sudo chown ${USER}:${USER} -R /srv/salt/
|
||||
sudo chown ${USER}:${USER} -R /etc/conf.d/
|
||||
sudo chown ${USER}:www-data /srv/salt/scripts/userdefined
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.npm
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.config
|
||||
sudo chown -R $USER:$GROUP /home/${USER}/.cache
|
||||
sudo chmod 750 /srv/salt/scripts/userdefined
|
||||
sudo chown ${USER}:${USER} -R /etc/letsencrypt
|
||||
sudo chmod 775 -R /etc/letsencrypt
|
||||
|
||||
cp /rmm/_modules/* /srv/salt/_modules/
|
||||
cp /rmm/scripts/* /srv/salt/scripts/
|
||||
CHECK_REMOVE_SALT=$(grep KEEP_SALT /rmm/api/tacticalrmm/tacticalrmm/local_settings.py)
|
||||
if ! [[ $CHECK_REMOVE_SALT ]]; then
|
||||
printf >&2 "${YELLOW}This update removes salt from the rmm${NC}\n"
|
||||
printf >&2 "${YELLOW}You may continue to use salt on existing agents, but there will not be any more integration with tacticalrmm, and new agents will not install the salt-minion${NC}\n"
|
||||
until [[ $rmsalt =~ (y|n) ]]; do
|
||||
echo -ne "${YELLOW}Would you like to remove salt? (recommended) [y/n]${NC}: "
|
||||
read rmsalt
|
||||
done
|
||||
if [[ $rmsalt == "y" ]]; then
|
||||
keepsalt="$(cat << EOF
|
||||
KEEP_SALT = False
|
||||
EOF
|
||||
)"
|
||||
else
|
||||
keepsalt="$(cat << EOF
|
||||
KEEP_SALT = True
|
||||
EOF
|
||||
)"
|
||||
fi
|
||||
echo "${keepsalt}" | tee --append /rmm/api/tacticalrmm/tacticalrmm/local_settings.py > /dev/null
|
||||
|
||||
if [[ $rmsalt == "y" ]]; then
|
||||
printf >&2 "${Green}Removing salt-master and salt-api${NC}\n"
|
||||
for i in salt-api salt-master; do sudo systemctl stop $i; sudo systemctl disable $i; done
|
||||
sudo apt remove -y --purge salt-master salt-api salt-common
|
||||
else
|
||||
sudo systemctl stop salt-api
|
||||
sudo systemctl disable salt-api
|
||||
fi
|
||||
fi
|
||||
|
||||
/usr/local/rmmgo/go/bin/go get github.com/josephspurrier/goversioninfo/cmd/goversioninfo
|
||||
sudo cp /rmm/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/
|
||||
sudo chown ${USER}:${USER} /usr/local/bin/goversioninfo
|
||||
@@ -272,9 +253,9 @@ fi
|
||||
python manage.py pre_update_tasks
|
||||
python manage.py migrate
|
||||
python manage.py delete_tokens
|
||||
python manage.py fix_salt_key
|
||||
python manage.py collectstatic --no-input
|
||||
python manage.py reload_nats
|
||||
python manage.py load_chocos
|
||||
python manage.py post_update_tasks
|
||||
deactivate
|
||||
|
||||
@@ -291,13 +272,7 @@ sudo rm -rf /var/www/rmm/dist
|
||||
sudo cp -pr /rmm/web/dist /var/www/rmm/
|
||||
sudo chown www-data:www-data -R /var/www/rmm/dist
|
||||
|
||||
printf >&2 "${GREEN}Starting salt-master service${NC}\n"
|
||||
sudo systemctl start salt-master
|
||||
sleep 7
|
||||
printf >&2 "${GREEN}Starting salt-api service${NC}\n"
|
||||
sudo systemctl start salt-api
|
||||
|
||||
for i in rmm celery celerybeat celery-winupdate nginx nats natsapi
|
||||
for i in rmm celery celerybeat nginx nats natsapi
|
||||
do
|
||||
printf >&2 "${GREEN}Starting ${i} service${NC}\n"
|
||||
sudo systemctl start ${i}
|
||||
|
||||
12 web/package-lock.json generated
@@ -3933,9 +3933,9 @@
}
},
"@quasar/extras": {
"version": "1.9.13",
"resolved": "https://registry.npmjs.org/@quasar/extras/-/extras-1.9.13.tgz",
"integrity": "sha512-9ptRGMjBqfgxzLmH4MOlzSzmkwg7JAkWaB+EqDuZmjHyqkAGjGkuZj2LK4qbJdgveR94mlsJHb5fKojKzGkt0w=="
"version": "1.9.14",
"resolved": "https://registry.npmjs.org/@quasar/extras/-/extras-1.9.14.tgz",
"integrity": "sha512-8g2C0qsdKmaSbIR16C87yQbpY/pDubcZU6r69E9NuX/oNLhudWwg1kux9mdk3rlHHG4z7qty3i9+Dedampwt2A=="
},
"@quasar/fastclick": {
"version": "1.1.4",
@@ -15063,9 +15063,9 @@
"dev": true
},
"quasar": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/quasar/-/quasar-1.15.0.tgz",
"integrity": "sha512-GTIz/8AgzpHKPsbrQfO32dPPEGjDSFDWRE4dvt3AHh2LtRRAE5QJwPMqCy11GYGhyLMeZaRl76AuCUVb0BYTzA=="
"version": "1.15.1",
"resolved": "https://registry.npmjs.org/quasar/-/quasar-1.15.1.tgz",
"integrity": "sha512-yiqxAYh/Q56ryel3yLOLulY89m2kqqX38fFhNp/qZpMeHRezCDHceg3jRTKQwGvP2LS2bhJr+mEFVXa/5zKT5w=="
},
"query-string": {
"version": "4.3.4",

@@ -9,12 +9,12 @@
"test:unit": "quasar test --unit jest"
},
"dependencies": {
"@quasar/extras": "^1.9.13",
"@quasar/extras": "^1.9.14",
"axios": "^0.21.1",
"apexcharts": "^3.23.1",
"dotenv": "^8.2.0",
"qrcode.vue": "^1.7.0",
"quasar": "^1.15.0",
"quasar": "^1.15.1",
"vue-apexcharts": "^1.6.0"
},
"devDependencies": {

@@ -229,8 +229,8 @@ export default {
.then(response => {
this.$q.notify(notifySuccessConfig(`User ${data.username} was deleted!`));
})
.catch(error => {
this.$q.notify(notifyErrorConfig(`An Error occured while deleting user ${data.username}`));
.catch(e => {
this.$q.notify(notifyErrorConfig(e.response.data));
});
});
},
@@ -295,8 +295,8 @@ export default {
.then(response => {
this.$q.notify(notifySuccessConfig(response.data, 4000));
})
.catch(error => {
this.$q.notify(notifyErrorConfig("An Error occured while resetting key"));
.catch(e => {
this.$q.notify(notifyErrorConfig(e.response.data));
});
});
},

@@ -317,7 +317,7 @@
<q-tooltip>Reboot required</q-tooltip>
</q-icon>
</q-td>
<q-td key="lastseen" :props="props">{{ formatDate(props.row.last_seen) }}</q-td>
<q-td key="lastseen" :props="props">{{ unixToString(props.row.last_seen) }}</q-td>
<q-td key="boottime" :props="props">{{ bootTime(props.row.boot_time) }}</q-td>
</q-tr>
</template>
@@ -363,7 +363,7 @@ import axios from "axios";
import { notifySuccessConfig, notifyErrorConfig } from "@/mixins/mixins";
import mixins from "@/mixins/mixins";
import { mapGetters } from "vuex";
import { openURL } from "quasar";
import { date } from "quasar";
import EditAgent from "@/components/modals/agents/EditAgent";
import RebootLater from "@/components/modals/agents/RebootLater";
import PendingActions from "@/components/modals/logs/PendingActions";
@@ -440,9 +440,10 @@ export default {
if (availability === "online" && row.status !== "online") return false;
else if (availability === "offline" && row.status !== "overdue") return false;
else if (availability === "expired") {
const nowPlus30Days = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
const unixtime = Date.parse(row.last_seen);
if (unixtime > nowPlus30Days) return false;
let now = new Date();
let lastSeen = new Date(row.last_seen * 1000);
let diff = date.getDateDiff(now, lastSeen, "days");
if (diff < 30) return false;
}
}

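The rewritten "expired" branch above assumes last_seen now arrives as a unix timestamp in seconds (hence the * 1000) and uses Quasar's date.getDateDiff to keep only agents not seen for 30 days or more. A minimal sketch of that check in isolation (the helper name is illustrative, not part of the codebase):

import { date } from "quasar";

// true when the agent has not been seen for at least 30 days
function isExpired(lastSeenUnixSeconds) {
  const now = new Date();
  const lastSeen = new Date(lastSeenUnixSeconds * 1000);
  return date.getDateDiff(now, lastSeen, "days") >= 30;
}

// example: an agent last seen roughly 45 days ago counts as expired
isExpired(Date.now() / 1000 - 45 * 24 * 60 * 60); // true
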
@@ -500,9 +501,12 @@ export default {
});
},
runPatchStatusScan(pk, hostname) {
axios.get(`/winupdate/${pk}/runupdatescan/`).then(r => {
this.notifySuccess(`Scan will be run shortly on ${hostname}`);
});
this.$axios
.get(`/winupdate/${pk}/runupdatescan/`)
.then(r => {
this.notifySuccess(`Scan will be run shortly on ${hostname}`);
})
.catch(e => this.notifyError(e.response.data));
},
installPatches(pk) {
this.$q.loading.show();
@@ -537,10 +541,17 @@ export default {
window.open(url, "", "scrollbars=no,location=no,status=no,toolbar=no,menubar=no,width=1280,height=826");
},
runChecks(pk) {
axios
this.$q.loading.show();
this.$axios
.get(`/checks/runchecks/${pk}/`)
.then(r => this.notifySuccess(`Checks will now be re-run on ${r.data}`))
.catch(e => this.notifyError(e.response.data));
.then(r => {
this.$q.loading.hide();
this.notifySuccess(r.data);
})
.catch(e => {
this.$q.loading.hide();
this.notifyError(e.response.data);
});
},
removeAgent(pk, name) {
this.$q
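runChecks now wraps the request with the Quasar loading plugin and hides the overlay on both the success and the error path. The same idea, expressed as a small hypothetical helper (not in the repo), just to show the pattern:

// hypothetical helper: show the loading overlay for the lifetime of a request
function withLoading(vm, request) {
  vm.$q.loading.show();
  return request
    .then(r => {
      vm.$q.loading.hide();
      return r;
    })
    .catch(e => {
      vm.$q.loading.hide();
      throw e; // let the caller decide how to notify
    });
}
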
@@ -633,14 +644,14 @@ export default {
this.$store.dispatch("loadNotes", pk);
},
overdueAlert(category, pk, alert_action) {
const db_field = category === "email" ? "overdue_email_alert" : "overdue_text_alert";
const action = alert_action ? "enabled" : "disabled";
const data = {
pk: pk,
alertType: category,
action: action,
[db_field]: alert_action,
};
const alertColor = alert_action ? "positive" : "warning";
axios
this.$axios
.post("/agents/overdueaction/", data)
.then(r => {
this.$q.notify({
@@ -649,7 +660,7 @@ export default {
message: `Overdue ${category} alerts ${action} on ${r.data}`,
});
})
.catch(e => this.notifyError(e.response.data.error));
.catch(() => this.notifyError("Something went wrong"));
},
agentClass(status) {
if (status === "offline") {

@@ -36,12 +36,6 @@
</q-badge>
<span>Do not popup any message boxes during install</span>
</div>
<div class="q-pa-xs q-gutter-xs">
<q-badge class="text-caption q-mr-xs" color="grey" text-color="black">
<code>-nosalt</code>
</q-badge>
<span> Do not install salt during agent install. </span>
</div>
<div class="q-pa-xs q-gutter-xs">
<q-badge class="text-caption q-mr-xs" color="grey" text-color="black">
<code>-local-mesh "C:\\<some folder or path>\\meshagent.exe"</code>

@@ -12,8 +12,6 @@
<q-radio dense v-model="mode" val="mesh" label="Mesh Agent" />
<q-radio dense v-model="mode" val="rpc" label="Tactical RPC" />
<q-radio dense v-model="mode" val="tacagent" label="Tactical Agent" />
<q-radio dense v-model="mode" val="checkrunner" label="Tactical Checkrunner" />
<q-radio dense v-model="mode" val="salt" label="Salt Minion" />
<q-radio dense v-model="mode" val="command" label="Shell Command" />
</div>
</q-card-section>
@@ -21,13 +19,7 @@
<p>Fix issues with the Mesh Agent which handles take control, live terminal and file browser.</p>
</q-card-section>
<q-card-section v-show="mode === 'tacagent'">
<p>Fix issues with the TacticalAgent windows service which handles agent check-in and os info.</p>
</q-card-section>
<q-card-section v-show="mode === 'checkrunner'">
<p>Fix issues with the Tactical Checkrunner windows service which handles running all checks.</p>
</q-card-section>
<q-card-section v-show="mode === 'salt'">
<p>Fix issues with the salt-minion which handles windows updates and chocolatey.</p>
<p>Fix issues with the TacticalAgent windows service which handles agent check-in.</p>
</q-card-section>
<q-card-section v-show="mode === 'rpc'">
<p>

@@ -298,7 +298,6 @@
</template>

<script>
import axios from "axios";
import mixins from "@/mixins/mixins";
import { mapState } from "vuex";
import ResetPatchPolicy from "@/components/modals/coresettings/ResetPatchPolicy";
@@ -329,7 +328,7 @@ export default {
},
methods: {
getCoreSettings() {
axios.get("/core/getcoresettings/").then(r => {
this.$axios.get("/core/getcoresettings/").then(r => {
this.settings = r.data;
this.allTimezones = Object.freeze(r.data.all_timezones);
this.ready = true;
@@ -388,7 +387,8 @@ export default {
},
editSettings() {
this.$q.loading.show();
axios
delete this.settings.all_timezones;
this.$axios
.patch("/core/editsettings/", this.settings)
.then(r => {
this.$q.loading.hide();

|
||||
import { Notify } from "quasar";
|
||||
import { Notify, date } from "quasar";
|
||||
|
||||
export function notifySuccessConfig(msg, timeout = 2000) {
|
||||
return {
|
||||
@@ -95,6 +95,10 @@ export default {
|
||||
|
||||
return includeSeconds ? formatted + ":" + appendLeadingZeroes(dt.getSeconds()) : formatted
|
||||
},
|
||||
unixToString(timestamp) {
|
||||
let t = new Date(timestamp * 1000)
|
||||
return date.formatDate(t, 'MMM-D-YYYY - HH:mm')
|
||||
},
|
||||
formatClientOptions(clients) {
|
||||
return clients.map(client => ({ label: client.name, value: client.id, sites: client.sites }))
|
||||
},
|
||||
|
||||
@@ -141,7 +141,7 @@ export default function () {
|
||||
SET_AGENT_DBLCLICK_ACTION(state, action) {
|
||||
state.agentDblClickAction = action
|
||||
},
|
||||
SET_DEFAULT_AGENT_TBL_TABd(state, tab) {
|
||||
SET_DEFAULT_AGENT_TBL_TAB(state, tab) {
|
||||
state.defaultAgentTblTab = tab
|
||||
}
|
||||
},
|
||||
|
||||
@@ -548,7 +548,7 @@ export default {
|
||||
},
|
||||
refreshEntireSite() {
|
||||
this.$store.dispatch("loadTree");
|
||||
this.getDashInfo();
|
||||
this.getDashInfo(false);
|
||||
this.getAgentCounts();
|
||||
|
||||
if (this.allClientsActive) {
|
||||
@@ -661,7 +661,7 @@ export default {
|
||||
this.poll = setInterval(() => {
|
||||
this.$store.dispatch("checkVer");
|
||||
this.getAgentCounts();
|
||||
this.getDashInfo();
|
||||
this.getDashInfo(false);
|
||||
}, 60 * 5 * 1000);
|
||||
},
|
||||
setSplitter(val) {
|
||||
@@ -675,13 +675,13 @@ export default {
|
||||
this.workstationOfflineCount = r.data.total_workstation_offline_count;
|
||||
});
|
||||
},
|
||||
getDashInfo() {
|
||||
getDashInfo(setDefaultTab = true) {
|
||||
this.$store.dispatch("getDashInfo").then(r => {
|
||||
this.darkMode = r.data.dark_mode;
|
||||
this.$q.dark.set(this.darkMode);
|
||||
this.currentTRMMVersion = r.data.trmm_version;
|
||||
this.$store.commit("SET_AGENT_DBLCLICK_ACTION", r.data.dbl_click_action);
|
||||
this.$store.commit("SET_DEFAULT_AGENT_TBL_TABd", r.data.default_agent_tbl_tab);
|
||||
if (setDefaultTab) this.$store.commit("SET_DEFAULT_AGENT_TBL_TAB", r.data.default_agent_tbl_tab);
|
||||
this.$store.commit("setShowCommunityScripts", r.data.show_community_scripts);
|
||||
});
|
||||
},
|
||||
@@ -764,9 +764,16 @@ export default {
|
||||
clientsTree: state => state.tree,
|
||||
treeReady: state => state.treeReady,
|
||||
clients: state => state.clients,
|
||||
tab: state => state.defaultAgentTblTab,
|
||||
}),
|
||||
...mapGetters(["selectedAgentPk", "needRefresh"]),
|
||||
tab: {
|
||||
get: function () {
|
||||
return this.$store.state.defaultAgentTblTab;
|
||||
},
|
||||
set: function (newVal) {
|
||||
this.$store.commit("SET_DEFAULT_AGENT_TBL_TAB", newVal);
|
||||
},
|
||||
},
|
||||
allClientsActive() {
|
||||
return this.selectedTree === "" ? true : false;
|
||||
},
|
||||
|