Compare commits

48 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 1d102ef096 | |
| | bf3c65778e | |
| | df7fe3e6b4 | |
| | b657468b62 | |
| | 4edc0058d3 | |
| | 2c3b35293b | |
| | be0c9a4d46 | |
| | dd4140558e | |
| | 71c2519b8e | |
| | badfc26aed | |
| | b2bc3adb3d | |
| | 5ccf408fd6 | |
| | da185875bb | |
| | af16912541 | |
| | 1bf9e2a5e6 | |
| | 5a572651ff | |
| | 5a191e387f | |
| | 18f29f5790 | |
| | 054a73e0f8 | |
| | 14824db7b0 | |
| | 721c48ea88 | |
| | ed7bfcfb58 | |
| | 773a40a126 | |
| | 961252ef26 | |
| | a2650f3c47 | |
| | d71ee194e1 | |
| | 22e1a4cf41 | |
| | a50bf901d3 | |
| | c9469635b5 | |
| | 36df3278e5 | |
| | cb2258aaa8 | |
| | 0391d9eb7e | |
| | 12698b4c20 | |
| | f7b9d459ab | |
| | 65ab14e68b | |
| | 93a5dd5de4 | |
| | 61807bdaaa | |
| | a1a5d1adba | |
| | 9dd4aefea5 | |
| | db4540089a | |
| | 24c899c91a | |
| | ade1a73966 | |
| | fb9ec2b040 | |
| | 3a683812e9 | |
| | 6d317603c9 | |
| | 5a3d2d196c | |
| | 87d05223af | |
| | babf6366e8 | |

(The Author and Date columns were not captured in this extract.)
@@ -1,11 +1,11 @@
# pulls community scripts from git repo
FROM python:3.10.6-slim AS GET_SCRIPTS_STAGE
FROM python:3.10.8-slim AS GET_SCRIPTS_STAGE
RUN apt-get update && \
apt-get install -y --no-install-recommends git && \
git clone https://github.com/amidaware/community-scripts.git /community-scripts
FROM python:3.10.6-slim
FROM python:3.10.8-slim
ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready

5 .github/workflows/ci-tests.yml vendored

@@ -14,7 +14,7 @@ jobs:
name: Tests
strategy:
matrix:
python-version: ["3.10.6"]
python-version: ["3.10.8"]
steps:
- uses: actions/checkout@v3

@@ -27,9 +27,10 @@ jobs:
postgresql password: "pipeline123456"
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Install redis
run: |

13 .vscode/settings.json vendored

@@ -1,7 +1,10 @@
{
"python.defaultInterpreterPath": "api/tacticalrmm/env/bin/python",
"python.defaultInterpreterPath": "api/env/bin/python",
"python.languageServer": "Pylance",
"python.analysis.extraPaths": ["api/tacticalrmm", "api/env"],
"python.analysis.extraPaths": [
"api/tacticalrmm",
"api/env"
],
"python.analysis.diagnosticSeverityOverrides": {
"reportUnusedImport": "error",
"reportDuplicateImport": "error",

@@ -22,7 +25,9 @@
"**env/**"
],
"python.formatting.provider": "black",
"mypy.targets": ["api/tacticalrmm"],
"mypy.targets": [
"api/tacticalrmm"
],
"mypy.runUsingActiveInterpreter": true,
"editor.bracketPairColorization.enabled": true,
"editor.guides.bracketPairs": true,

@@ -70,4 +75,4 @@
"completeUnimported": true,
"staticcheck": true
}
}
}

@@ -1,6 +1,6 @@
---
user: "tactical"
python_ver: "3.10.6"
python_ver: "3.10.8"
go_ver: "1.18.5"
backend_repo: "https://github.com/amidaware/tacticalrmm.git"
frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"

@@ -1,5 +1,6 @@
import os
import subprocess
from contextlib import suppress
import pyotp
from django.core.management.base import BaseCommand

@@ -25,7 +26,7 @@ class Command(BaseCommand):
nginx = "/etc/nginx/sites-available/frontend.conf"
found = None
if os.path.exists(nginx):
try:
with suppress(Exception):
with open(nginx, "r") as f:
for line in f:
if "server_name" in line:

@@ -35,8 +36,6 @@ class Command(BaseCommand):
if found:
rep = found.replace("server_name", "").replace(";", "")
domain = "".join(rep.split())
except:
pass
code = pyotp.random_base32()
user.totp_key = code
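Note: many hunks in this changeset replace a bare `try: ... except: pass` with `contextlib.suppress`, as in the nginx-parsing block above. A minimal sketch (not from the repo) of the two equivalent shapes:

```python
# Minimal sketch: suppress() replaces a try/except whose only handler is "pass".
from contextlib import suppress

def read_first_line(path: str) -> str | None:
    # old style seen on the removed lines
    try:
        with open(path) as f:
            return f.readline().strip()
    except FileNotFoundError:
        pass
    return None

def read_first_line_suppressed(path: str) -> str | None:
    # new style used throughout this diff
    with suppress(FileNotFoundError):
        with open(path) as f:
            return f.readline().strip()
    return None
```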
@@ -31,8 +31,8 @@ class RolesPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_roles")
else:
return _has_perm(r, "can_manage_roles")
return _has_perm(r, "can_manage_roles")
class APIKeyPerms(permissions.BasePermission):

@@ -64,7 +64,7 @@ class Command(BaseCommand):
try:
agent.delete()
except Exception as e:
err = f"Failed to delete agent {agent.hostname}: {str(e)}"
err = f"Failed to delete agent {agent.hostname}: {e}"
self.stdout.write(self.style.ERROR(err))
else:
deleted_count += 1

@@ -27,6 +27,7 @@ from tacticalrmm.constants import (
EvtLogFailWhen,
EvtLogNames,
EvtLogTypes,
GoArch,
PAAction,
ScriptShell,
TaskSyncStatus,

@@ -47,10 +48,12 @@ from tacticalrmm.demo_data import (
temp_dir_stdout,
wmi_deb,
wmi_pi,
wmi_mac,
disks_mac,
)
from winupdate.models import WinUpdate, WinUpdatePolicy
AGENTS_TO_GENERATE = 20
AGENTS_TO_GENERATE = 250
SVCS = settings.BASE_DIR.joinpath("tacticalrmm/test_data/winsvcs.json")
WMI_1 = settings.BASE_DIR.joinpath("tacticalrmm/test_data/wmi1.json")

@@ -177,6 +180,8 @@ class Command(BaseCommand):
"WSUS",
"DESKTOP-12345",
"LAPTOP-55443",
"db-aws-01",
"Karens-MacBook-Air.local",
)
descriptions = ("Bob's computer", "Primary DC", "File Server", "Karen's Laptop")
modes = AgentMonType.values

@@ -194,6 +199,7 @@ class Command(BaseCommand):
linux_deb_os = "Debian 11.2 x86_64 5.10.0-11-amd64"
linux_pi_os = "Raspbian 11.2 armv7l 5.10.92-v7+"
mac_os = "Darwin 12.5.1 arm64 21.6.0"
public_ips = ("65.234.22.4", "74.123.43.5", "44.21.134.45")

@@ -313,18 +319,25 @@ class Command(BaseCommand):
mode = AgentMonType.SERVER
# pi arm
if plat_pick == 7:
agent.goarch = "arm"
agent.goarch = GoArch.ARM32
agent.wmi_detail = wmi_pi
agent.disks = disks_linux_pi
agent.operating_system = linux_pi_os
else:
agent.goarch = "amd64"
agent.goarch = GoArch.AMD64
agent.wmi_detail = wmi_deb
agent.disks = disks_linux_deb
agent.operating_system = linux_deb_os
elif plat_pick in (4, 14):
agent.plat = AgentPlat.DARWIN
mode = random.choice([AgentMonType.SERVER, AgentMonType.WORKSTATION])
agent.goarch = GoArch.ARM64
agent.wmi_detail = wmi_mac
agent.disks = disks_mac
agent.operating_system = mac_os
else:
agent.plat = AgentPlat.WINDOWS
agent.goarch = "amd64"
agent.goarch = GoArch.AMD64
mode = random.choice(modes)
agent.wmi_detail = random.choice(wmi_details)
agent.services = services

@@ -334,8 +347,8 @@ class Command(BaseCommand):
else:
agent.operating_system = random.choice(op_systems_workstations)
agent.hostname = random.choice(hostnames)
agent.version = settings.LATEST_AGENT_VER
agent.hostname = random.choice(hostnames)
agent.site = Site.objects.get(name=site)
agent.agent_id = self.rand_string(40)
agent.description = random.choice(descriptions)

@@ -1,6 +1,7 @@
import asyncio
import re
from collections import Counter
from contextlib import suppress
from distutils.version import LooseVersion
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast

@@ -130,8 +131,8 @@ class Agent(BaseAuditModel):
# return the default timezone unless the timezone is explicity set per agent
if self.time_zone:
return self.time_zone
else:
return get_core_settings().default_time_zone
return get_core_settings().default_time_zone
@property
def is_posix(self) -> bool:

@@ -232,12 +233,12 @@ class Agent(BaseAuditModel):
alert_severity = (
check.check_result.alert_severity
if check.check_type
in [
in (
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]
)
else check.alert_severity
)
if alert_severity == AlertSeverity.ERROR:

@@ -333,8 +334,8 @@ class Agent(BaseAuditModel):
if len(ret) == 1:
return cast(str, ret[0])
else:
return ", ".join(ret) if ret else "error getting local ips"
return ", ".join(ret) if ret else "error getting local ips"
@property
def make_model(self) -> str:

@@ -344,7 +345,7 @@ class Agent(BaseAuditModel):
except:
return "error getting make/model"
try:
with suppress(Exception):
comp_sys = self.wmi_detail["comp_sys"][0]
comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
make = [x["Vendor"] for x in comp_sys_prod if "Vendor" in x][0]

@@ -361,14 +362,10 @@ class Agent(BaseAuditModel):
model = sysfam
return f"{make} {model}"
except:
pass
try:
with suppress(Exception):
comp_sys_prod = self.wmi_detail["comp_sys_prod"][0]
return cast(str, [x["Version"] for x in comp_sys_prod if "Version" in x][0])
except:
pass
return "unknown make/model"

@@ -479,7 +476,7 @@ class Agent(BaseAuditModel):
models.prefetch_related_objects(
[
policy
for policy in [self.policy, site_policy, client_policy, default_policy]
for policy in (self.policy, site_policy, client_policy, default_policy)
if policy
],
"excluded_agents",

@@ -589,7 +586,7 @@ class Agent(BaseAuditModel):
def approve_updates(self) -> None:
patch_policy = self.get_patch_policy()
severity_list = list()
severity_list = []
if patch_policy.critical == "approve":
severity_list.append("Critical")

@@ -621,17 +618,14 @@ class Agent(BaseAuditModel):
if not agent_policy:
agent_policy = WinUpdatePolicy.objects.create(agent=self)
# Get the list of policies applied to the agent and select the
# highest priority one.
policies = self.get_agent_policies()
processed_policies: List[int] = list()
for _, policy in policies.items():
if (
policy
and policy.active
and policy.pk not in processed_policies
and policy.winupdatepolicy.exists()
):
if policy and policy.active and policy.winupdatepolicy.exists():
patch_policy = policy.winupdatepolicy.first()
break
# if policy still doesn't exist return the agent patch policy
if not patch_policy:

@@ -683,7 +677,7 @@ class Agent(BaseAuditModel):
policies = self.get_agent_policies()
# loop through all policies applied to agent and return an alert_template if found
processed_policies: List[int] = list()
processed_policies: List[int] = []
for key, policy in policies.items():
# default alert_template will override a default policy with alert template applied
if (

@@ -873,7 +867,7 @@ class Agent(BaseAuditModel):
return AgentAuditSerializer(agent).data
def delete_superseded_updates(self) -> None:
try:
with suppress(Exception):
pks = [] # list of pks to delete
kbs = list(self.winupdates.values_list("kb", flat=True))
d = Counter(kbs)

@@ -898,8 +892,6 @@ class Agent(BaseAuditModel):
pks = list(set(pks))
self.winupdates.filter(pk__in=pks).delete()
except:
pass
def should_create_alert(
self, alert_template: "Optional[AlertTemplate]" = None

@@ -1018,16 +1010,16 @@ class AgentCustomField(models.Model):
return cast(List[str], self.multiple_value)
elif self.field.type == CustomFieldType.CHECKBOX:
return self.bool_value
else:
return cast(str, self.string_value)
return cast(str, self.string_value)
def save_to_field(self, value: Union[List[Any], bool, str]) -> None:
if self.field.type in [
if self.field.type in (
CustomFieldType.TEXT,
CustomFieldType.NUMBER,
CustomFieldType.SINGLE,
CustomFieldType.DATETIME,
]:
):
self.string_value = cast(str, value)
self.save()
elif self.field.type == CustomFieldType.MULTIPLE:

@@ -122,5 +122,5 @@ class AgentHistoryPerms(permissions.BasePermission):
return _has_perm(r, "can_list_agent_history") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
else:
return _has_perm(r, "can_list_agent_history")
return _has_perm(r, "can_list_agent_history")

@@ -100,21 +100,21 @@ class AgentTableSerializer(serializers.ModelSerializer):
if not obj.alert_template:
return None
else:
return {
"name": obj.alert_template.name,
"always_email": obj.alert_template.agent_always_email,
"always_text": obj.alert_template.agent_always_text,
"always_alert": obj.alert_template.agent_always_alert,
}
return {
"name": obj.alert_template.name,
"always_email": obj.alert_template.agent_always_email,
"always_text": obj.alert_template.agent_always_text,
"always_alert": obj.alert_template.agent_always_alert,
}
def get_logged_username(self, obj) -> str:
if obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE:
return obj.last_logged_in_user
elif obj.logged_in_username != "None":
return obj.logged_in_username
else:
return "-"
return "-"
def get_italic(self, obj) -> bool:
return obj.logged_in_username == "None" and obj.status == AGENT_STATUS_ONLINE
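Note: the permission and serializer hunks above repeatedly apply two small refactors: dropping the redundant `else:` after an early `return`, and testing membership against a tuple literal instead of a list. An illustrative sketch (names here are made up, not from the repo) of the resulting "guard clause" shape:

```python
# Illustrative only: guard-clause style used throughout this changeset.
def has_permission(method: str, perms: set[str]) -> bool:
    if method in ("GET", "PATCH"):           # tuple literal instead of ["GET", "PATCH"]
        return "can_list_things" in perms
    return "can_manage_things" in perms      # no else needed after a return
```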
@@ -1,6 +1,7 @@
import asyncio
import tempfile
import urllib.parse
from pathlib import Path
from django.conf import settings
from django.http import FileResponse

@@ -54,9 +55,7 @@ def generate_linux_install(
f"{core.mesh_site}/meshagents?id={mesh_id}&installflags=2&meshinstall={arch_id}"
)
sh = settings.LINUX_AGENT_SCRIPT
with open(sh, "r") as f:
text = f.read()
text = Path(settings.LINUX_AGENT_SCRIPT).read_text()
replace = {
"agentDLChange": download_url,

@@ -4,6 +4,7 @@ import os
import random
import string
import time
from pathlib import Path
from django.conf import settings
from django.db.models import Count, Exists, OuterRef, Prefetch, Q

@@ -239,11 +240,9 @@ class GetUpdateDeleteAgent(APIView):
code = "foo" # stub for windows
if agent.plat == AgentPlat.LINUX:
with open(settings.LINUX_AGENT_SCRIPT, "r") as f:
code = f.read()
code = Path(settings.LINUX_AGENT_SCRIPT).read_text()
elif agent.plat == AgentPlat.DARWIN:
with open(settings.MAC_UNINSTALL, "r") as f:
code = f.read()
code = Path(settings.MAC_UNINSTALL).read_text()
asyncio.run(agent.nats_cmd({"func": "uninstall", "code": code}, wait=False))
name = agent.hostname

@@ -255,7 +254,7 @@ class GetUpdateDeleteAgent(APIView):
asyncio.run(remove_mesh_agent(uri, mesh_id))
except Exception as e:
DebugLog.error(
message=f"Unable to remove agent {name} from meshcentral database: {str(e)}",
message=f"Unable to remove agent {name} from meshcentral database: {e}",
log_type=DebugLogType.AGENT_ISSUES,
)
return Response(f"{name} will now be uninstalled.")

@@ -273,7 +272,7 @@ class AgentProcesses(APIView):
agent = get_object_or_404(Agent, agent_id=agent_id)
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
if r == "timeout" or r == "natsdown":
if r in ("timeout", "natsdown"):
return notify_error("Unable to contact the agent")
return Response(r)

@@ -284,7 +283,7 @@ class AgentProcesses(APIView):
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
)
if r == "timeout" or r == "natsdown":
if r in ("timeout", "natsdown"):
return notify_error("Unable to contact the agent")
elif r != "ok":
return notify_error(r)

@@ -416,7 +415,7 @@ def get_event_log(request, agent_id, logtype, days):
},
}
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
if r == "timeout" or r == "natsdown":
if r in ("timeout", "natsdown"):
return notify_error("Unable to contact the agent")
return Response(r)

@@ -654,10 +653,7 @@ def install_agent(request):
elif request.data["installMethod"] == "powershell":
ps = os.path.join(settings.BASE_DIR, "core/installer.ps1")
with open(ps, "r") as f:
text = f.read()
text = Path(settings.BASE_DIR / "core" / "installer.ps1").read_text()
replace_dict = {
"innosetupchange": inno,

@@ -684,8 +680,7 @@ def install_agent(request):
except Exception as e:
DebugLog.error(message=str(e))
with open(ps1, "w") as f:
f.write(text)
Path(ps1).write_text(text)
if settings.DEBUG:
with open(ps1, "r") as f:

@@ -1014,10 +1009,10 @@ def agent_maintenance(request):
if count:
action = "disabled" if not request.data["action"] else "enabled"
return Response(f"Maintenance mode has been {action} on {count} agents")
else:
return Response(
f"No agents have been put in maintenance mode. You might not have permissions to the resources."
)
return Response(
f"No agents have been put in maintenance mode. You might not have permissions to the resources."
)
@api_view(["GET"])
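Note: the views above replace `open()`/`read()`/`write()` pairs with `pathlib.Path` one-liners. A small sketch of the pattern (the path below is a placeholder, not a real setting from the repo):

```python
# Sketch of the refactor: pathlib handles open/close in a single call.
from pathlib import Path

script = Path("/tmp/example_installer.sh")  # placeholder path

# before
with open(script, "r") as f:
    code = f.read()

# after
code = script.read_text()
script.with_suffix(".bak").write_text(code)  # write_text works the same way
```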
@@ -617,7 +617,7 @@ class Alert(models.Model):
if not args:
return []
temp_args = list()
temp_args = []
# pattern to match for injection
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")

@@ -32,7 +32,7 @@ def _has_perm_on_alert(user: "User", id: int) -> bool:
class AlertPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET" or r.method == "PATCH":
if r.method in ("GET", "PATCH"):
if "pk" in view.kwargs.keys():
return _has_perm(r, "can_list_alerts") and _has_perm_on_alert(
r.user, view.kwargs["pk"]

@@ -52,5 +52,5 @@ class AlertTemplatePerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_alerttemplates")
else:
return _has_perm(r, "can_manage_alerttemplates")
return _has_perm(r, "can_manage_alerttemplates")

@@ -41,13 +41,13 @@ class GetAddAlerts(APIView):
elif any(
key
in [
in (
"timeFilter",
"clientFilter",
"severityFilter",
"resolvedFilter",
"snoozedFilter",
]
)
for key in request.data.keys()
):
clientFilter = Q()

@@ -218,12 +218,12 @@ class Policy(BaseAuditModel):
def get_policy_tasks(agent: "Agent") -> "List[AutomatedTask]":
# List of all tasks to be applied
tasks = list()
tasks = []
# Get policies applied to agent and agent site and client
policies = agent.get_agent_policies()
processed_policies = list()
processed_policies = []
for _, policy in policies.items():
if policy and policy.active and policy.pk not in processed_policies:

@@ -244,10 +244,10 @@ class Policy(BaseAuditModel):
# Used to hold the policies that will be applied and the order in which they are applied
# Enforced policies are applied first
enforced_checks = list()
policy_checks = list()
enforced_checks = []
policy_checks = []
processed_policies = list()
processed_policies = []
for _, policy in policies.items():
if policy and policy.active and policy.pk not in processed_policies:

@@ -263,24 +263,24 @@ class Policy(BaseAuditModel):
return []
# Sorted Checks already added
added_diskspace_checks: List[str] = list()
added_ping_checks: List[str] = list()
added_winsvc_checks: List[str] = list()
added_script_checks: List[int] = list()
added_eventlog_checks: List[List[str]] = list()
added_cpuload_checks: List[int] = list()
added_memory_checks: List[int] = list()
added_diskspace_checks: List[str] = []
added_ping_checks: List[str] = []
added_winsvc_checks: List[str] = []
added_script_checks: List[int] = []
added_eventlog_checks: List[List[str]] = []
added_cpuload_checks: List[int] = []
added_memory_checks: List[int] = []
# Lists all agent and policy checks that will be returned
diskspace_checks: "List[Check]" = list()
ping_checks: "List[Check]" = list()
winsvc_checks: "List[Check]" = list()
script_checks: "List[Check]" = list()
eventlog_checks: "List[Check]" = list()
cpuload_checks: "List[Check]" = list()
memory_checks: "List[Check]" = list()
diskspace_checks: "List[Check]" = []
ping_checks: "List[Check]" = []
winsvc_checks: "List[Check]" = []
script_checks: "List[Check]" = []
eventlog_checks: "List[Check]" = []
cpuload_checks: "List[Check]" = []
memory_checks: "List[Check]" = []
overridden_checks: List[int] = list()
overridden_checks: List[int] = []
# Loop over checks in with enforced policies first, then non-enforced policies
for check in enforced_checks + agent_checks + policy_checks:

@@ -7,5 +7,5 @@ class AutomationPolicyPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_automation_policies")
else:
return _has_perm(r, "can_manage_automation_policies")
return _has_perm(r, "can_manage_automation_policies")

@@ -1,6 +1,7 @@
import asyncio
import random
import string
from contextlib import suppress
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
import pytz

@@ -262,13 +263,13 @@ class AutomatedTask(BaseAuditModel):
else True,
}
if self.task_type in [
if self.task_type in (
TaskType.RUN_ONCE,
TaskType.DAILY,
TaskType.WEEKLY,
TaskType.MONTHLY,
TaskType.MONTHLY_DOW,
]:
):
# set runonce task in future if creating and run_asap_after_missed is set
if (
not editing

@@ -432,10 +433,8 @@ class AutomatedTask(BaseAuditModel):
if r != "ok" and "The system cannot find the file specified" not in r:
task_result.sync_status = TaskSyncStatus.PENDING_DELETION
try:
with suppress(DatabaseError):
task_result.save(update_fields=["sync_status"])
except DatabaseError:
pass
DebugLog.warning(
agent=agent,

@@ -100,13 +100,13 @@ class TaskSerializer(serializers.ModelSerializer):
# run_time_date required
if (
data["task_type"]
in [
in (
TaskType.RUN_ONCE,
TaskType.DAILY,
TaskType.WEEKLY,
TaskType.MONTHLY,
TaskType.MONTHLY_DOW,
]
)
and not data["run_time_date"]
):
raise serializers.ValidationError(

@@ -188,13 +188,12 @@ class TaskSerializer(serializers.ModelSerializer):
if not alert_template:
return None
else:
return {
"name": alert_template.name,
"always_email": alert_template.task_always_email,
"always_text": alert_template.task_always_text,
"always_alert": alert_template.task_always_alert,
}
return {
"name": alert_template.name,
"always_email": alert_template.task_always_email,
"always_text": alert_template.task_always_text,
"always_alert": alert_template.task_always_alert,
}
class Meta:
model = AutomatedTask

@@ -1,6 +1,7 @@
import asyncio
import datetime as dt
import random
from contextlib import suppress
from time import sleep
from typing import Optional, Union

@@ -16,60 +17,64 @@ from tacticalrmm.constants import DebugLogType
@app.task
def create_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str:
try:
with suppress(
AutomatedTask.DoesNotExist,
Agent.DoesNotExist,
):
task = AutomatedTask.objects.get(pk=pk)
if agent_id:
task.create_task_on_agent(Agent.objects.get(agent_id=agent_id))
else:
task.create_task_on_agent()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
return "ok"
@app.task
def modify_win_task(pk: int, agent_id: Optional[str] = None) -> str:
try:
with suppress(
AutomatedTask.DoesNotExist,
Agent.DoesNotExist,
):
task = AutomatedTask.objects.get(pk=pk)
if agent_id:
task.modify_task_on_agent(Agent.objects.get(agent_id=agent_id))
else:
task.modify_task_on_agent()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
return "ok"
@app.task
def delete_win_task_schedule(pk: int, agent_id: Optional[str] = None) -> str:
try:
with suppress(
AutomatedTask.DoesNotExist,
Agent.DoesNotExist,
):
task = AutomatedTask.objects.get(pk=pk)
if agent_id:
task.delete_task_on_agent(Agent.objects.get(agent_id=agent_id))
else:
task.delete_task_on_agent()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
return "ok"
@app.task
def run_win_task(pk: int, agent_id: Optional[str] = None) -> str:
try:
with suppress(
AutomatedTask.DoesNotExist,
Agent.DoesNotExist,
):
task = AutomatedTask.objects.get(pk=pk)
if agent_id:
task.run_win_task(Agent.objects.get(agent_id=agent_id))
else:
task.run_win_task()
except (AutomatedTask.DoesNotExist, Agent.DoesNotExist):
pass
return "ok"

@@ -149,8 +149,8 @@ class Check(BaseAuditModel):
def __str__(self):
if self.agent:
return f"{self.agent.hostname} - {self.readable_desc}"
else:
return f"{self.policy.name} - {self.readable_desc}"
return f"{self.policy.name} - {self.readable_desc}"
def save(self, *args, **kwargs):

@@ -198,10 +198,7 @@ class Check(BaseAuditModel):
return f"{display}: Drive {self.disk} - {text}"
elif self.check_type == CheckType.PING:
return f"{display}: {self.name}"
elif (
self.check_type == CheckType.CPU_LOAD or self.check_type == CheckType.MEMORY
):
elif self.check_type in (CheckType.CPU_LOAD, CheckType.MEMORY):
text = ""
if self.warning_threshold:
text += f" Warning Threshold: {self.warning_threshold}%"

@@ -215,8 +212,8 @@ class Check(BaseAuditModel):
return f"{display}: {self.name}"
elif self.check_type == CheckType.SCRIPT:
return f"{display}: {self.script.name}"
else:
return "n/a"
return "n/a"
@staticmethod
def non_editable_fields() -> list[str]:

@@ -335,12 +332,12 @@ class CheckResult(models.Model):
def save(self, *args, **kwargs):
# if check is a policy check clear cache on everything
if not self.alert_severity and self.assigned_check.check_type in [
if not self.alert_severity and self.assigned_check.check_type in (
CheckType.MEMORY,
CheckType.CPU_LOAD,
CheckType.DISK_SPACE,
CheckType.SCRIPT,
]:
):
self.alert_severity = AlertSeverity.WARNING
super(CheckResult, self).save(

@@ -5,7 +5,7 @@ from tacticalrmm.permissions import _has_perm, _has_perm_on_agent
class ChecksPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET" or r.method == "PATCH":
if r.method in ("GET", "PATCH"):
if "agent_id" in view.kwargs.keys():
return _has_perm(r, "can_list_checks") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]

@@ -43,13 +43,13 @@ class CheckSerializer(serializers.ModelSerializer):
if not alert_template:
return None
else:
return {
"name": alert_template.name,
"always_email": alert_template.check_always_email,
"always_text": alert_template.check_always_text,
"always_alert": alert_template.check_always_alert,
}
return {
"name": alert_template.name,
"always_email": alert_template.check_always_email,
"always_text": alert_template.check_always_text,
"always_alert": alert_template.check_always_alert,
}
class Meta:
model = Check

@@ -239,7 +239,6 @@ class TestCheckViews(TacticalTestCase):
r = self.client.post(url)
self.assertEqual(r.status_code, 200)
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
self.assertEqual(r.json(), f"Checks will now be re-run on {agent.hostname}")
nats_cmd.reset_mock()
nats_cmd.return_value = "timeout"

@@ -169,6 +169,6 @@ def run_checks(request, agent_id):
if r == "busy":
return notify_error(f"Checks are already running on {agent.hostname}")
elif r == "ok":
return Response(f"Checks will now be re-run on {agent.hostname}")
else:
return notify_error("Unable to contact the agent")
return Response(f"Checks will now be run on {agent.hostname}")
return notify_error("Unable to contact the agent")

@@ -229,16 +229,16 @@ class ClientCustomField(models.Model):
return self.multiple_value
elif self.field.type == CustomFieldType.CHECKBOX:
return self.bool_value
else:
return self.string_value
return self.string_value
def save_to_field(self, value):
if self.field.type in [
if self.field.type in (
CustomFieldType.TEXT,
CustomFieldType.NUMBER,
CustomFieldType.SINGLE,
CustomFieldType.DATETIME,
]:
):
self.string_value = value
self.save()
elif self.field.type == CustomFieldType.MULTIPLE:

@@ -280,16 +280,16 @@ class SiteCustomField(models.Model):
return self.multiple_value
elif self.field.type == CustomFieldType.CHECKBOX:
return self.bool_value
else:
return self.string_value
return self.string_value
def save_to_field(self, value):
if self.field.type in [
if self.field.type in (
CustomFieldType.TEXT,
CustomFieldType.NUMBER,
CustomFieldType.SINGLE,
CustomFieldType.DATETIME,
]:
):
self.string_value = value
self.save()
elif self.field.type == CustomFieldType.MULTIPLE:

@@ -12,7 +12,7 @@ class ClientsPerms(permissions.BasePermission):
)
else:
return _has_perm(r, "can_list_clients")
elif r.method == "PUT" or r.method == "DELETE":
elif r.method in ("PUT", "DELETE"):
return _has_perm(r, "can_manage_clients") and _has_perm_on_client(
r.user, view.kwargs["pk"]
)

@@ -29,7 +29,7 @@ class SitesPerms(permissions.BasePermission):
)
else:
return _has_perm(r, "can_list_sites")
elif r.method == "PUT" or r.method == "DELETE":
elif r.method in ("PUT", "DELETE"):
return _has_perm(r, "can_manage_sites") and _has_perm_on_site(
r.user, view.kwargs["pk"]
)

@@ -41,5 +41,5 @@ class DeploymentPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_deployments")
else:
return _has_perm(r, "can_manage_deployments")
return _has_perm(r, "can_manage_deployments")

@@ -1,6 +1,7 @@
import datetime as dt
import re
import uuid
from contextlib import suppress
from django.db.models import Count, Exists, OuterRef, Prefetch, prefetch_related_objects
from django.shortcuts import get_object_or_404

@@ -338,10 +339,8 @@ class AgentDeployment(APIView):
if not _has_perm_on_site(request.user, d.site.pk):
raise PermissionDenied()
try:
with suppress(Exception):
d.auth_token.delete()
except:
pass
d.delete()
return Response("The deployment was deleted")

@@ -67,8 +67,11 @@ RemoveOldAgent() {
InstallMesh() {
if [ -f /etc/os-release ]; then
distroID=$(. /etc/os-release; echo $ID)
distroIDLIKE=$(. /etc/os-release; echo $ID_LIKE)
if [[ " ${deb[*]} " =~ " ${distroID} " ]]; then
set_locale_deb
elif [[ " ${deb[*]} " =~ " ${distroIDLIKE} " ]]; then
set_locale_deb
elif [[ " ${rhe[*]} " =~ " ${distroID} " ]]; then
set_locale_rhel
else

@@ -78,7 +81,7 @@ InstallMesh() {
meshTmpDir=$(mktemp -d -t "mesh-XXXXXXXXX")
if [ $? -ne 0 ]; then
meshTmpDir='meshtemp'
meshTmpDir='/root/meshtemp'
mkdir -p ${meshTmpDir}
fi
meshTmpBin="${meshTmpDir}/meshagent"

@@ -119,6 +122,10 @@ RemoveOldAgent
echo "Downloading tactical agent..."
wget -q -O ${agentBin} "${agentDL}"
if [ $? -ne 0 ]; then
echo "ERROR: Unable to download tactical agent"
exit 1
fi
chmod +x ${agentBin}
MESH_NODE_ID=""

@@ -178,4 +185,4 @@ EOF
echo "${tacticalsvc}" | tee ${agentSysD} > /dev/null
systemctl daemon-reload
systemctl enable --now ${agentSvcName}
systemctl enable --now ${agentSvcName}

@@ -1,4 +1,5 @@
import asyncio
from contextlib import suppress
from channels.db import database_sync_to_async
from channels.generic.websocket import AsyncJsonWebsocketConsumer

@@ -24,10 +25,8 @@ class DashInfo(AsyncJsonWebsocketConsumer):
async def disconnect(self, close_code):
try:
with suppress(Exception):
self.dash_info.cancel()
except:
pass
self.connected = False
await self.close()

@@ -1,8 +1,16 @@
#!/bin/bash
/usr/local/mesh_services/meshagent/meshagent -fulluninstall
if [ -f /usr/local/mesh_services/meshagent/meshagent ]; then
/usr/local/mesh_services/meshagent/meshagent -fulluninstall
fi
if [ -f /opt/tacticalmesh/meshagent ]; then
/opt/tacticalmesh/meshagent -fulluninstall
fi
launchctl bootout system /Library/LaunchDaemons/tacticalagent.plist
rm -rf /usr/local/mesh_services
rm -rf /opt/tacticalmesh
rm -f /etc/tacticalagent
rm -rf /opt/tacticalagent
rm -f /Library/LaunchDaemons/tacticalagent.plist
@@ -0,0 +1,60 @@
import configparser
import os
from pathlib import Path
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Generate conf for uwsgi"
def handle(self, *args, **kwargs):
self.stdout.write("Creating uwsgi conf...")
config = configparser.ConfigParser()
if getattr(settings, "DOCKER_BUILD", False):
home = str(Path(os.getenv("VIRTUAL_ENV"))) # type: ignore
socket = "0.0.0.0:8080"
else:
home = str(settings.BASE_DIR.parents[0] / "env")
socket = str(settings.BASE_DIR / "tacticalrmm.sock")
config["uwsgi"] = {
"chdir": str(settings.BASE_DIR),
"module": "tacticalrmm.wsgi",
"home": home,
"master": str(getattr(settings, "UWSGI_MASTER", True)).lower(),
"enable-threads": str(
getattr(settings, "UWSGI_ENABLE_THREADS", True)
).lower(),
"socket": socket,
"harakiri": str(getattr(settings, "UWSGI_HARAKIRI", 300)),
"chmod-socket": str(getattr(settings, "UWSGI_CHMOD_SOCKET", 660)),
"buffer-size": str(getattr(settings, "UWSGI_BUFFER_SIZE", 65535)),
"vacuum": str(getattr(settings, "UWSGI_VACUUM", True)).lower(),
"die-on-term": str(getattr(settings, "UWSGI_DIE_ON_TERM", True)).lower(),
"max-requests": str(getattr(settings, "UWSGI_MAX_REQUESTS", 500)),
"disable-logging": str(
getattr(settings, "UWSGI_DISABLE_LOGGING", True)
).lower(),
"cheaper-algo": "busyness",
"cheaper": str(getattr(settings, "UWSGI_CHEAPER", 4)),
"cheaper-initial": str(getattr(settings, "UWSGI_CHEAPER_INITIAL", 4)),
"workers": str(getattr(settings, "UWSGI_MAX_WORKERS", 40)),
"cheaper-step": str(getattr(settings, "UWSGI_CHEAPER_STEP", 2)),
"cheaper-overload": str(getattr(settings, "UWSGI_CHEAPER_OVERLOAD", 3)),
"cheaper-busyness-min": str(getattr(settings, "UWSGI_BUSYNESS_MIN", 5)),
"cheaper-busyness-max": str(getattr(settings, "UWSGI_BUSYNESS_MAX", 10)),
}
if getattr(settings, "UWSGI_DEBUG", False):
config["uwsgi"]["stats"] = "/tmp/stats.socket"
config["uwsgi"]["cheaper-busyness-verbose"] = str(True).lower()
with open(settings.BASE_DIR / "app.ini", "w") as fp:
config.write(fp)
self.stdout.write("Created uwsgi conf")
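Note: this new management command writes a single `[uwsgi]` section to `BASE_DIR / "app.ini"`. A hypothetical sanity check, assuming an install where `BASE_DIR` is `/rmm/api/tacticalrmm` (that path, and the idea of reading the file back at all, are illustrations and not part of the diff):

```python
# Hypothetical check that the generated app.ini is well formed.
import configparser

config = configparser.ConfigParser()
config.read("/rmm/api/tacticalrmm/app.ini")  # assumed location of BASE_DIR / "app.ini"

assert config.has_section("uwsgi")
# "harakiri" defaults to 300 in the command above unless UWSGI_HARAKIRI overrides it
print(config["uwsgi"].get("harakiri"))
```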
@@ -1,3 +1,5 @@
from contextlib import suppress
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand

@@ -8,9 +10,7 @@ class Command(BaseCommand):
help = "Populates the global site settings on first install"
def handle(self, *args, **kwargs):
try:
# can only be 1 instance of this. Prevents error when rebuilding docker container
with suppress(ValidationError):
CoreSettings().save()
self.stdout.write("Core db populated")
except ValidationError:
# can only be 1 instance of this. Prevents error when rebuilding docker container
pass

@@ -7,6 +7,6 @@ class Command(BaseCommand):
help = "Collection of tasks to run after updating the rmm, before migrations"
def handle(self, *args, **kwargs):
self.stdout.write(self.style.WARNING("Clearning the cache"))
self.stdout.write(self.style.WARNING("Cleaning the cache"))
clear_entire_cache()
self.stdout.write(self.style.SUCCESS("Cache was cleared!"))

@@ -1,4 +1,5 @@
import smtplib
from contextlib import suppress
from email.message import EmailMessage
from typing import TYPE_CHECKING, List, Optional, cast

@@ -108,12 +109,10 @@ class CoreSettings(BaseAuditModel):
# for install script
if not self.pk:
try:
with suppress(Exception):
self.mesh_site = settings.MESH_SITE
self.mesh_username = settings.MESH_USERNAME.lower()
self.mesh_token = settings.MESH_TOKEN_KEY
except:
pass
old_settings = type(self).objects.get(pk=self.pk) if self.pk else None
super(BaseAuditModel, self).save(*args, **kwargs)

@@ -315,8 +314,8 @@ class CustomField(BaseAuditModel):
return self.default_values_multiple
elif self.type == CustomFieldType.CHECKBOX:
return self.default_value_bool
else:
return self.default_value_string
return self.default_value_string
def get_or_create_field_value(self, instance):
from agents.models import Agent, AgentCustomField

@@ -365,6 +364,23 @@ class CodeSignToken(models.Model):
return r.status_code == 200
@property
def is_expired(self) -> bool:
if not self.token:
return False
try:
r = requests.post(
settings.CHECK_TOKEN_URL,
json={"token": self.token, "api": settings.ALLOWED_HOSTS[0]},
headers={"Content-type": "application/json"},
timeout=15,
)
except:
return False
return r.status_code == 401
def __str__(self):
return "Code signing token"

@@ -7,8 +7,8 @@ class CoreSettingsPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_view_core_settings")
else:
return _has_perm(r, "can_edit_core_settings")
return _has_perm(r, "can_edit_core_settings")
class URLActionPerms(permissions.BasePermission):

@@ -30,5 +30,5 @@ class CustomFieldPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_view_customfields")
else:
return _has_perm(r, "can_manage_customfields")
return _has_perm(r, "can_manage_customfields")

@@ -53,6 +53,16 @@ def token_is_valid() -> tuple[str, bool]:
return "", False
def token_is_expired() -> bool:
from core.models import CodeSignToken
t: "CodeSignToken" = CodeSignToken.objects.first()
if not t or not t.token:
return False
return t.is_expired
def get_core_settings() -> "CoreSettings":
from core.models import CORESETTINGS_CACHE_KEY, CoreSettings

@@ -1,4 +1,5 @@
import re
from pathlib import Path
import psutil
import pytz

@@ -6,8 +7,8 @@ from cryptography import x509
from django.conf import settings
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone as djangotime
from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import api_view, permission_classes
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticated

@@ -73,6 +74,7 @@ def clear_cache(request):
@api_view()
def dashboard_info(request):
from core.utils import token_is_expired
from tacticalrmm.utils import get_latest_trmm_ver
return Response(

@@ -93,6 +95,7 @@ def dashboard_info(request):
"hosted": getattr(settings, "HOSTED", False),
"date_format": request.user.date_format,
"default_date_format": get_core_settings().date_format,
"token_is_expired": token_is_expired(),
}
)

@@ -175,8 +178,8 @@ class GetAddCustomFields(APIView):
if "model" in request.data.keys():
fields = CustomField.objects.filter(model=request.data["model"])
return Response(CustomFieldSerializer(fields, many=True).data)
else:
return notify_error("The request was invalid")
return notify_error("The request was invalid")
def post(self, request):
serializer = CustomFieldSerializer(data=request.data, partial=True)

@@ -231,7 +234,7 @@ class CodeSign(APIView):
except Exception as e:
return notify_error(str(e))
if r.status_code == 400 or r.status_code == 401:
if r.status_code in (400, 401):
return notify_error(r.json()["ret"])
elif r.status_code == 200:
t = CodeSignToken.objects.first()

@@ -414,8 +417,7 @@ def status(request):
mem_usage: int = round(psutil.virtual_memory().percent)
cert_file, _ = get_certs()
with open(cert_file, "rb") as f:
cert_bytes = f.read()
cert_bytes = Path(cert_file).read_bytes()
cert = x509.load_pem_x509_certificate(cert_bytes)
expires = pytz.utc.localize(cert.not_valid_after)

@@ -282,7 +282,7 @@ class DebugLog(models.Model):
agent: "Optional[Agent]" = None,
log_type: str = DebugLogType.SYSTEM_ISSUES,
) -> None:
if get_debug_level() in [DebugLogLevel.INFO]:
if get_debug_level() == DebugLogLevel.INFO:
cls.objects.create(
log_level=DebugLogLevel.INFO,
agent=agent,

@@ -297,7 +297,7 @@ class DebugLog(models.Model):
agent: "Optional[Agent]" = None,
log_type: str = DebugLogType.SYSTEM_ISSUES,
) -> None:
if get_debug_level() in [DebugLogLevel.INFO, DebugLogLevel.WARN]:
if get_debug_level() in (DebugLogLevel.INFO, DebugLogLevel.WARN):
cls.objects.create(
log_level=DebugLogLevel.INFO,
agent=agent,

@@ -312,11 +312,11 @@ class DebugLog(models.Model):
agent: "Optional[Agent]" = None,
log_type: str = DebugLogType.SYSTEM_ISSUES,
) -> None:
if get_debug_level() in [
if get_debug_level() in (
DebugLogLevel.INFO,
DebugLogLevel.WARN,
DebugLogLevel.ERROR,
]:
):
cls.objects.create(
log_level=DebugLogLevel.ERROR,
agent=agent,

@@ -331,12 +331,12 @@ class DebugLog(models.Model):
agent: "Optional[Agent]" = None,
log_type: str = DebugLogType.SYSTEM_ISSUES,
) -> None:
if get_debug_level() in [
if get_debug_level() in (
DebugLogLevel.INFO,
DebugLogLevel.WARN,
DebugLogLevel.ERROR,
DebugLogLevel.CRITICAL,
]:
):
cls.objects.create(
log_level=DebugLogLevel.CRITICAL,
agent=agent,

@@ -376,8 +376,8 @@ class PendingAction(models.Model):
return "Next update cycle"
elif self.action_type == PAAction.CHOCO_INSTALL:
return "ASAP"
else:
return "On next checkin"
return "On next checkin"
@property
def description(self) -> Optional[str]:

@@ -390,15 +390,15 @@ class PendingAction(models.Model):
elif self.action_type == PAAction.CHOCO_INSTALL:
return f"{self.details['name']} software install"
elif self.action_type in [
elif self.action_type in (
PAAction.RUN_CMD,
PAAction.RUN_SCRIPT,
PAAction.RUN_PATCH_SCAN,
PAAction.RUN_PATCH_INSTALL,
]:
):
return f"{self.action_type}"
else:
return None
return None
class BaseAuditModel(models.Model):

@@ -16,8 +16,8 @@ class AuditLogSerializer(serializers.ModelSerializer):
return SiteMinimumSerializer(
Agent.objects.get(agent_id=obj.agent_id).site
).data
else:
return None
return None
class Meta:
model = AuditLog

@@ -5,4 +5,5 @@ pytest
pytest-django
pytest-xdist
pytest-cov
codecov
codecov
refurb

@@ -1,39 +1,39 @@
asgiref==3.5.2
celery==5.2.7
certifi==2022.9.14
certifi==2022.9.24
cffi==1.15.1
channels==3.0.5
channels_redis==3.4.1
channels==4.0.0
channels_redis==4.0.0
chardet==4.0.0
cryptography==38.0.1
daphne==3.0.2
Django==4.1.1
daphne==4.0.0
Django==4.1.2
django-cors-headers==3.13.0
django-ipware==4.0.2
django-rest-knox==4.2.0
djangorestframework==3.14.0
drf-spectacular==0.24.1
drf-spectacular==0.24.2
future==0.18.2
msgpack==1.0.4
nats-py==2.1.7
psutil==5.9.2
psycopg2-binary==2.9.3
nats-py==2.2.0
psutil==5.9.3
psycopg2-binary==2.9.4
pycparser==2.21
pycryptodome==3.15.0
pyotp==2.7.0
pyparsing==3.0.9
pytz==2022.2.1
pytz==2022.5
qrcode==7.3.1
redis==4.3.4
hiredis==2.0.0
requests==2.28.1
six==1.16.0
sqlparse==0.4.2
twilio==7.14.1
sqlparse==0.4.3
twilio==7.15.0
urllib3==1.26.12
uWSGI==2.0.20
uWSGI==2.0.21
validators==0.20.0
vine==5.0.0
websockets==10.3
zipp==3.8.1
zipp==3.9.0
meshctrl==0.1.15

@@ -47,7 +47,7 @@ class Script(BaseAuditModel):
@property
def code_no_snippets(self):
return self.script_body if self.script_body else ""
return self.script_body or ""
@property
def code(self):

@@ -66,11 +66,12 @@ class Script(BaseAuditModel):
else:
value = ""
replaced_code = re.sub(snippet.group(), value, replaced_code)
replaced_code = re.sub(
snippet.group(), value.replace("\\", "\\\\"), replaced_code
)
return replaced_code
else:
return code
return code
def hash_script_body(self):
from django.conf import settings
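Note: the snippet-replacement hunk above doubles backslashes in the replacement value before passing it to `re.sub`, because `re.sub` interprets backslashes in the replacement string as escape sequences. A small sketch of why that matters (the template and snippet body below are invented for illustration):

```python
# Why the replacement value needs its backslashes doubled for re.sub.
import re

template = "run {{snippet}} here"
snippet_body = r"C:\temp\x.ps1"  # contains literal backslashes

# re.sub(r"\{\{snippet\}\}", snippet_body, template) would raise
# re.error("bad escape \\x") because "\x" is parsed as a replacement escape.
safe = snippet_body.replace("\\", "\\\\")
print(re.sub(r"\{\{snippet\}\}", safe, template))  # -> run C:\temp\x.ps1 here
```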
@@ -112,14 +113,14 @@ class Script(BaseAuditModel):
else 90
)
args = script["args"] if "args" in script.keys() else list()
args = script["args"] if "args" in script.keys() else []
syntax = script["syntax"] if "syntax" in script.keys() else ""
supported_platforms = (
script["supported_platforms"]
if "supported_platforms" in script.keys()
else list()
else []
)
# if community script exists update it

@@ -187,12 +188,12 @@ class Script(BaseAuditModel):
return ScriptSerializer(script).data
@classmethod
def parse_script_args(cls, agent, shell: str, args: List[str] = list()) -> list:
def parse_script_args(cls, agent, shell: str, args: List[str] = []) -> list:
if not args:
return []
temp_args = list()
temp_args = []
# pattern to match for injection
pattern = re.compile(".*\\{\\{(.*)\\}\\}.*")

@@ -7,5 +7,5 @@ class ScriptsPerms(permissions.BasePermission):
def has_permission(self, r, view) -> bool:
if r.method == "GET":
return _has_perm(r, "can_list_scripts")
else:
return _has_perm(r, "can_manage_scripts")
return _has_perm(r, "can_manage_scripts")

@@ -9,5 +9,5 @@ class WinSvcsPerms(permissions.BasePermission):
return _has_perm(r, "can_manage_winsvcs") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
else:
return _has_perm(r, "can_manage_winsvcs")
return _has_perm(r, "can_manage_winsvcs")

@@ -41,7 +41,7 @@ class GetServices(APIView):
agent = get_object_or_404(Agent, agent_id=agent_id)
r = asyncio.run(agent.nats_cmd(data={"func": "winservices"}, timeout=10))
if r == "timeout" or r == "natsdown":
if r in ("timeout", "natsdown"):
return notify_error("Unable to contact the agent")
agent.services = r

@@ -13,7 +13,6 @@ class SoftwarePerms(permissions.BasePermission):
return _has_perm(r, "can_list_software")
else:
return _has_perm(r, "can_manage_software") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
return _has_perm(r, "can_manage_software") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)

@@ -22,8 +22,8 @@ def chocos(request):
chocos = ChocoSoftware.objects.last()
if not chocos:
return Response({})
else:
return Response(chocos.chocos)
return Response(chocos.chocos)
class GetSoftware(APIView):

@@ -79,7 +79,7 @@ class GetSoftware(APIView):
return notify_error(f"Not available for {agent.plat}")
r: Any = asyncio.run(agent.nats_cmd({"func": "softwarelist"}, timeout=15))
if r == "timeout" or r == "natsdown":
if r in ("timeout", "natsdown"):
return notify_error("Unable to contact the agent")
if not InstalledSoftware.objects.filter(agent=agent).exists():

@@ -95,6 +95,41 @@ disks_linux_deb = [
},
]
disks_mac = [
{
"free": "94.2 GB",
"used": "134.1 GB",
"total": "228.3 GB",
"device": "/dev/disk3s1s1",
"fstype": "apfs",
"percent": 58,
},
{
"free": "481.6 MB",
"used": "18.4 MB",
"total": "500.0 MB",
"device": "/dev/disk1s3",
"fstype": "apfs",
"percent": 3,
},
{
"free": "3.4 GB",
"used": "1.6 GB",
"total": "5.0 GB",
"device": "/dev/disk2s1",
"fstype": "apfs",
"percent": 32,
},
{
"free": "94.2 GB",
"used": "134.1 GB",
"total": "228.3 GB",
"device": "/dev/disk3s1",
"fstype": "apfs",
"percent": 58,
},
]
wmi_deb = {
"cpus": ["AMD Ryzen 9 3900X 12-Core Processor"],
"gpus": ["Cirrus Logic GD 5446"],

@@ -111,6 +146,22 @@ wmi_pi = {
"make_model": "Raspberry Pi 2 Model B Rev 1.1",
}
wmi_mac = {
"cpus": ["Apple M1"],
"gpus": [],
"disks": [
"Apple APPLE SSD AP0256Q SCSI SSD disk0 233.8 GB",
"Apple APPLE SSD AP0256Q SCSI SSD disk1 500.0 MB",
"Apple APPLE SSD AP0256Q SCSI SSD disk2 5.0 GB",
"Apple APPLE SSD AP0256Q SCSI SSD disk3 228.3 GB",
],
"local_ips": [
"192.168.45.113/24",
"fe80::476:c390:c8dc:11af/64",
],
"make_model": "MacBookAir10,1",
}
check_network_loc_aware_ps1 = r"""
$networkstatus = Get-NetConnectionProfile | Select NetworkCategory | Out-String

@@ -1,4 +1,5 @@
import threading
from contextlib import suppress
from typing import Any, Dict, Optional
from django.conf import settings

@@ -62,7 +63,7 @@ class AuditMiddleware:
request = APIView().initialize_request(request)
# check if user is authenticated
try:
with suppress(AuthenticationFailed):
if hasattr(request, "user") and request.user.is_authenticated:
try:

@@ -83,8 +84,6 @@ class AuditMiddleware:
# get authenticated user after request
request_local.username = request.user.username
except AuthenticationFailed:
pass
def process_exception(self, request, exception):
request_local.debug_info = None

@@ -26,7 +26,7 @@ class PermissionQuerySet(models.QuerySet):
model_name = self.model._meta.label.split(".")[1]
# checks which sites and clients the user has access to and filters agents
if model_name in ["Agent", "Deployment"]:
if model_name in ("Agent", "Deployment"):
if can_view_clients:
clients_queryset = models.Q(site__client__in=can_view_clients)

@@ -81,7 +81,7 @@ class PermissionQuerySet(models.QuerySet):
return self
# if model that is being filtered is a Check or Automated task we need to allow checks/tasks that are associated with policies
if model_name in ["Check", "AutomatedTask", "DebugLog"] and (
if model_name in ("Check", "AutomatedTask", "DebugLog") and (
can_view_clients or can_view_sites
):
agent_queryset = models.Q(agent=None) # dont filter if agent is None

@@ -1,4 +1,5 @@
import os
from contextlib import suppress
from datetime import timedelta
from pathlib import Path

@@ -19,26 +20,26 @@ MAC_UNINSTALL = BASE_DIR / "core" / "mac_uninstall.sh"
AUTH_USER_MODEL = "accounts.User"
# latest release
TRMM_VERSION = "0.15.0"
TRMM_VERSION = "0.15.2"
# https://github.com/amidaware/tacticalrmm-web
WEB_VERSION = "0.101.0"
WEB_VERSION = "0.101.5"
# bump this version everytime vue code is changed
# to alert user they need to manually refresh their browser
APP_VER = "0.0.171"
APP_VER = "0.0.173"
# https://github.com/amidaware/rmmagent
LATEST_AGENT_VER = "2.4.0"
LATEST_AGENT_VER = "2.4.1"
MESH_VER = "1.0.85"
MESH_VER = "1.0.90"
NATS_SERVER_VER = "2.9.1"
NATS_SERVER_VER = "2.9.3"
# for the update script, bump when need to recreate venv
PIP_VER = "32"
PIP_VER = "33"
SETUPTOOLS_VER = "65.2.0"
SETUPTOOLS_VER = "65.5.0"
WHEEL_VER = "0.37.1"
AGENT_BASE_URL = "https://agents.tacticalrmm.com"

@@ -73,10 +74,8 @@ HOSTED = False
SWAGGER_ENABLED = False
REDIS_HOST = "127.0.0.1"
try:
with suppress(ImportError):
from .local_settings import *
except ImportError:
pass
if "GHACTIONS" in os.environ:
DEBUG = False

@@ -106,6 +105,7 @@ if not DEBUG:
)
INSTALLED_APPS = [
"daphne",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",

@@ -28,7 +28,7 @@ from tacticalrmm.constants import (
DebugLogType,
ScriptShell,
)
from tacticalrmm.helpers import get_certs, notify_error, get_nats_ports
from tacticalrmm.helpers import get_certs, get_nats_ports, notify_error
def generate_winagent_exe(

@@ -112,7 +112,7 @@ def bitdays_to_string(day: int) -> str:
return "Every day"
for key, value in WEEK_DAYS.items():
if day & int(value):
if day & value:
ret.append(key)
return ", ".join(ret)

@@ -123,7 +123,7 @@ def bitmonths_to_string(month: int) -> str:
return "Every month"
for key, value in MONTHS.items():
if month & int(value):
if month & value:
ret.append(key)
return ", ".join(ret)

@@ -134,7 +134,7 @@ def bitweeks_to_string(week: int) -> str:
return "Every week"
for key, value in WEEKS.items():
if week & int(value):
if week & value:
ret.append(key)
return ", ".join(ret)
@@ -144,11 +144,11 @@ def bitmonthdays_to_string(day: int) -> str:
|
||||
|
||||
if day == MONTH_DAYS["Last Day"]:
|
||||
return "Last day"
|
||||
elif day == 2147483647 or day == 4294967295:
|
||||
elif day in (2147483647, 4294967295):
|
||||
return "Every day"
|
||||
|
||||
for key, value in MONTH_DAYS.items():
|
||||
if day & int(value):
|
||||
if day & value:
|
||||
ret.append(key)
|
||||
return ", ".join(ret)
|
||||
|
||||
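The four scheduling helpers above (bitdays_to_string, bitmonths_to_string, bitweeks_to_string, bitmonthdays_to_string) all decode a bitmask by AND-ing each named flag against the input; the diff drops the int(value) cast, presumably because the constant values are already plain integers. A standalone sketch of the same decoding pattern, using made-up flag values rather than the project's WEEK_DAYS constant:

# Illustrative flag table only; the real WEEK_DAYS lives in tacticalrmm.constants.
WEEK_DAYS = {
    "Sunday": 0x01,
    "Monday": 0x02,
    "Tuesday": 0x04,
    "Wednesday": 0x08,
    "Thursday": 0x10,
    "Friday": 0x20,
    "Saturday": 0x40,
}

def days_to_string(day: int) -> str:
    # keep every name whose bit is set in the mask
    return ", ".join(name for name, flag in WEEK_DAYS.items() if day & flag)

print(days_to_string(0x02 | 0x04 | 0x20))  # Monday, Tuesday, Friday
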
@@ -157,8 +157,8 @@ def convert_to_iso_duration(string: str) -> str:
tmp = string.upper()
if "D" in tmp:
return f"P{tmp.replace('D', 'DT')}"
else:
return f"PT{tmp}"

return f"PT{tmp}"


def reload_nats() -> None:
@@ -209,6 +209,11 @@ def reload_nats() -> None:
},
}

if "NATS_HTTP_PORT" in os.environ:
config["http_port"] = int(os.getenv("NATS_HTTP_PORT")) # type: ignore
elif hasattr(settings, "NATS_HTTP_PORT"):
config["http_port"] = settings.NATS_HTTP_PORT # type: ignore

conf = os.path.join(settings.BASE_DIR, "nats-rmm.conf")
with open(conf, "w") as f:
json.dump(config, f)

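The reload_nats() addition above gives the NATS monitoring port two sources with a clear precedence: an explicit NATS_HTTP_PORT environment variable wins, a NATS_HTTP_PORT attribute on Django settings is the fallback, and otherwise no http_port key is written at all. The same precedence shown in isolation, with a SimpleNamespace standing in for django.conf.settings and 8222 used only as an example value:

import os
from types import SimpleNamespace
from typing import Optional

def resolve_http_port(settings_obj) -> Optional[int]:
    # the environment variable takes priority over the settings attribute
    if "NATS_HTTP_PORT" in os.environ:
        return int(os.environ["NATS_HTTP_PORT"])
    if hasattr(settings_obj, "NATS_HTTP_PORT"):
        return int(settings_obj.NATS_HTTP_PORT)
    return None  # caller leaves http_port out of the NATS config entirely

fake_settings = SimpleNamespace(NATS_HTTP_PORT=8222)
print(resolve_http_port(fake_settings))  # 8222 unless NATS_HTTP_PORT is exported
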
@@ -391,5 +396,5 @@ def format_shell_array(value: list[str]) -> str:
def format_shell_bool(value: bool, shell: Optional[str]) -> str:
if shell == ScriptShell.POWERSHELL:
return "$True" if value else "$False"
else:
return "1" if value else "0"

return "1" if value else "0"

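This hunk, like convert_to_iso_duration above and the WinUpdatePolicy.__str__ and winupdates permission hunks below, applies the same small cleanup: when the if branch always returns, the trailing else only adds a level of nesting. A before/after sketch of the pattern, using a plain boolean instead of the project's ScriptShell enum so it stays self-contained:

# before: every branch returns, so the else adds nothing but indentation
def shell_bool_before(value: bool, powershell: bool) -> str:
    if powershell:
        return "$True" if value else "$False"
    else:
        return "1" if value else "0"

# after: early return, identical behaviour, one less indent level
def shell_bool_after(value: bool, powershell: bool) -> str:
    if powershell:
        return "$True" if value else "$False"
    return "1" if value else "0"

assert shell_bool_before(True, False) == shell_bool_after(True, False) == "1"
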
@@ -143,8 +143,8 @@ class WinUpdatePolicy(BaseAuditModel):
def __str__(self):
if self.agent:
return self.agent.hostname
else:
return self.policy.name

return self.policy.name

@staticmethod
def serialize(policy):

@@ -9,5 +9,5 @@ class AgentWinUpdatePerms(permissions.BasePermission):
return _has_perm(r, "can_manage_winupdates") and _has_perm_on_agent(
r.user, view.kwargs["agent_id"]
)
else:
return _has_perm(r, "can_manage_winupdates")

return _has_perm(r, "can_manage_winupdates")

@@ -1,6 +1,7 @@
import asyncio
import datetime as dt
import time
from contextlib import suppress

import pytz
from django.utils import timezone as djangotime
@@ -123,10 +124,10 @@ def bulk_install_updates_task(pks: list[int]) -> None:
for chunk in chunks:
for agent in chunk:
agent.delete_superseded_updates()
try:

with suppress(Exception):
agent.approve_updates()
except:
pass

nats_data = {
"func": "installwinupdates",
"guids": agent.get_approved_update_guids(),

@@ -27,24 +27,24 @@ if [ ! -f "/home/node/app/meshcentral-data/config.json" ] || [[ "${MESH_PERSISTE
{
"settings": {
"mongodb": "${encoded_uri}",
"Cert": "${MESH_HOST}",
"TLSOffload": "${NGINX_HOST_IP}",
"RedirPort": 8080,
"cert": "${MESH_HOST}",
"tlsOffload": "${NGINX_HOST_IP}",
"redirPort": 8080,
"WANonly": true,
"Minify": 1,
"Port": 4443,
"AgentAliasPort": 443,
"minify": 1,
"port": 4443,
"agentAliasPort": 443,
"aliasPort": 443,
"AllowLoginToken": true,
"AllowFraming": true,
"_AgentPing": 60,
"AgentPong": 300,
"AllowHighQualityDesktop": true,
"allowLoginToken": true,
"allowFraming": true,
"_agentPing": 60,
"agentPong": 300,
"allowHighQualityDesktop": true,
"agentCoreDump": false,
"Compression": true,
"WsCompression": true,
"AgentWsCompression": true,
"MaxInvalidLogin": {
"compression": true,
"wsCompression": true,
"agentWsCompression": true,
"maxInvalidLogin": {
"time": 5,
"count": 5,
"coolofftime": 30
@@ -52,12 +52,12 @@ if [ ! -f "/home/node/app/meshcentral-data/config.json" ] || [[ "${MESH_PERSISTE
},
"domains": {
"": {
"Title": "Tactical RMM",
"Title2": "TacticalRMM",
"NewAccounts": false,
"title": "Tactical RMM",
"title2": "TacticalRMM",
"newAccounts": false,
"mstsc": true,
"GeoLocation": true,
"CertUrl": "https://${NGINX_HOST_IP}:${NGINX_HOST_PORT}",
"geoLocation": true,
"certUrl": "https://${NGINX_HOST_IP}:${NGINX_HOST_PORT}",
"agentConfig": [ "webSocketMaskOverride=${WS_MASK_OVERRIDE}" ]
}
},

@@ -1,4 +1,4 @@
FROM nats:2.9.1-alpine
FROM nats:2.9.3-alpine

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready

@@ -1,5 +1,5 @@
# creates python virtual env
FROM python:3.10.6-slim AS CREATE_VENV_STAGE
FROM python:3.10.8-slim AS CREATE_VENV_STAGE

ARG DEBIAN_FRONTEND=noninteractive

@@ -21,14 +21,14 @@ RUN apt-get update && \
pip install --no-cache-dir -r ${TACTICAL_TMP_DIR}/api/requirements.txt

# pulls community scripts from git repo
FROM python:3.10.6-slim AS GET_SCRIPTS_STAGE
FROM python:3.10.8-slim AS GET_SCRIPTS_STAGE

RUN apt-get update && \
apt-get install -y --no-install-recommends git && \
git clone https://github.com/amidaware/community-scripts.git /community-scripts

# runtime image
FROM python:3.10.6-slim
FROM python:3.10.8-slim

# set env variables
ENV VIRTUAL_ENV /opt/venv

@@ -111,37 +111,6 @@ EOF

echo "${localvars}" > ${TACTICAL_DIR}/api/tacticalrmm/local_settings.py

uwsgiconf="$(cat << EOF
[uwsgi]
chdir = /opt/tactical/api
module = tacticalrmm.wsgi
home = /opt/venv
master = true
enable-threads = true
socket = 0.0.0.0:8080
harakiri = 300
chmod-socket = 660
buffer-size = 65535
vacuum = true
die-on-term = true
max-requests = 500
disable-logging = true
cheaper-algo = busyness
cheaper = 4
cheaper-initial = 4
workers = 20
cheaper-step = 2
cheaper-overload = 3
cheaper-busyness-min = 5
cheaper-busyness-max = 10
# stats = /tmp/stats.socket # uncomment when debugging
# cheaper-busyness-verbose = true # uncomment when debugging
EOF
)"

echo "${uwsgiconf}" > ${TACTICAL_DIR}/api/uwsgi.ini


# run migrations and init scripts
python manage.py pre_update_tasks
python manage.py migrate --no-input
@@ -152,6 +121,7 @@ EOF
python manage.py load_community_scripts
python manage.py reload_nats
python manage.py create_natsapi_conf
python manage.py create_uwsgi_conf
python manage.py create_installer_user
python manage.py post_update_tasks

@@ -173,7 +143,7 @@ fi
if [ "$1" = 'tactical-backend' ]; then
check_tactical_ready

uwsgi ${TACTICAL_DIR}/api/uwsgi.ini
uwsgi ${TACTICAL_DIR}/api/app.ini
fi

if [ "$1" = 'tactical-celery' ]; then

go.mod (8 changed lines)
@@ -6,8 +6,8 @@ require (
github.com/golang/protobuf v1.5.2 // indirect
github.com/jmoiron/sqlx v1.3.5
github.com/lib/pq v1.10.7
github.com/nats-io/nats-server/v2 v2.9.0 // indirect
github.com/nats-io/nats.go v1.16.1-0.20220906180156-a1017eec10b0
github.com/nats-io/nats-server/v2 v2.9.3 // indirect
github.com/nats-io/nats.go v1.18.0
github.com/ugorji/go/codec v1.2.7
github.com/wh1te909/trmm-shared v0.0.0-20220227075846-f9f757361139
google.golang.org/protobuf v1.28.0 // indirect
@@ -19,7 +19,7 @@ require (
github.com/nats-io/nkeys v0.3.0 // indirect
github.com/nats-io/nuid v1.0.1 // indirect
github.com/stretchr/testify v1.7.1 // indirect
golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90 // indirect
golang.org/x/sys v0.0.0-20220906135438-9e1f76180b77 // indirect
golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be // indirect
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
)

go.sum (20 changed lines)
@@ -9,7 +9,7 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY=
github.com/klauspost/compress v1.15.11 h1:Lcadnb3RKGin4FYM/orgq0qde+nc15E5Cbqg4B9Sx9c=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw=
github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
@@ -17,10 +17,10 @@ github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRU
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g=
github.com/nats-io/jwt/v2 v2.3.0 h1:z2mA1a7tIf5ShggOFlR1oBPgd6hGqcDYsISxZByUzdI=
github.com/nats-io/nats-server/v2 v2.9.0 h1:DLWu+7/VgGOoChcDKytnUZPAmudpv7o/MhKmNrnH1RE=
github.com/nats-io/nats-server/v2 v2.9.0/go.mod h1:BWKY6217RvhI+FDoOLZ2BH+hOC37xeKRBlQ1Lz7teKI=
github.com/nats-io/nats.go v1.16.1-0.20220906180156-a1017eec10b0 h1:dPUKD6Iv8M1y9MU8PK6H4a4/12yx5/CbaYWz/Z1arY8=
github.com/nats-io/nats.go v1.16.1-0.20220906180156-a1017eec10b0/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w=
github.com/nats-io/nats-server/v2 v2.9.3 h1:HrfzA7G9LNetKkm1z+jU/e9kuAe+E6uaBuuq9EB5sQQ=
github.com/nats-io/nats-server/v2 v2.9.3/go.mod h1:4sq8wvrpbvSzL1n3ZfEYnH4qeUuIl5W990j3kw13rRk=
github.com/nats-io/nats.go v1.18.0 h1:o480Ao6kuSSFyJO75rGTXCEPj7LGkY84C1Ye+Uhm4c0=
github.com/nats-io/nats.go v1.18.0/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w=
github.com/nats-io/nkeys v0.3.0 h1:cgM5tL53EvYRU+2YLXIK0G2mJtK12Ft9oeooSZMA2G8=
github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4=
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
@@ -39,16 +39,16 @@ github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95
github.com/wh1te909/trmm-shared v0.0.0-20220227075846-f9f757361139 h1:PfOl03o+Y+svWrfXAAu1QWUDePu1yqTq0pf4rpnN8eA=
github.com/wh1te909/trmm-shared v0.0.0-20220227075846-f9f757361139/go.mod h1:ILUz1utl5KgwrxmNHv0RpgMtKeh8gPAABvK2MiXBqv8=
golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90 h1:Y/gsMcFOcR+6S6f3YeMKl5g+dZMEWqcz5Czj/GWYbkM=
golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be h1:fmw3UbQh+nxngCAHrDCCztao/kbYFnWjoqop8dHx05A=
golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220906135438-9e1f76180b77 h1:C1tElbkWrsSkn3IRl1GCW/gETw1TywWIPgwZtXTZbYg=
golang.org/x/sys v0.0.0-20220906135438-9e1f76180b77/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec h1:BkDtF2Ih9xZ7le9ndzTA7KJow28VbQW3odyk/8drmuI=
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 h1:ftMN5LMiBFjbzleLqtoBZk7KdJwhuybIU+FckUHgoyQ=
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af h1:Yx9k8YCG3dvF87UAn2tu2HQLf2dt/eR1bXxpLMWeH+Y=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=

install.sh (86 changed lines)
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

SCRIPT_VERSION="68"
SCRIPT_VERSION="69"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/install.sh'

sudo apt install -y curl wget dirmngr gnupg lsb-release
@@ -12,7 +12,7 @@ RED='\033[0;31m'
NC='\033[0m'

SCRIPTS_DIR='/opt/trmm-community-scripts'
PYTHON_VER='3.10.6'
PYTHON_VER='3.10.8'
SETTINGS_FILE='/rmm/api/tacticalrmm/tacticalrmm/settings.py'

TMP_FILE=$(mktemp -p "" "rmminstall_XXXXXXXXXX")
@@ -168,7 +168,6 @@ CERT_PRIV_KEY=/etc/letsencrypt/live/${rootdomain}/privkey.pem
CERT_PUB_KEY=/etc/letsencrypt/live/${rootdomain}/fullchain.pem

sudo chown ${USER}:${USER} -R /etc/letsencrypt
sudo chmod 775 -R /etc/letsencrypt

print_green 'Installing Nginx'

@@ -325,34 +324,34 @@ sudo chown ${USER}:${USER} -R /meshcentral
meshcfg="$(cat << EOF
{
"settings": {
"Cert": "${meshdomain}",
"MongoDb": "mongodb://127.0.0.1:27017",
"MongoDbName": "meshcentral",
"cert": "${meshdomain}",
"mongoDb": "mongodb://127.0.0.1:27017",
"mongoDbName": "meshcentral",
"WANonly": true,
"Minify": 1,
"Port": 4430,
"AliasPort": 443,
"RedirPort": 800,
"AllowLoginToken": true,
"AllowFraming": true,
"_AgentPing": 60,
"AgentPong": 300,
"AllowHighQualityDesktop": true,
"TlsOffload": "127.0.0.1",
"minify": 1,
"port": 4430,
"aliasPort": 443,
"redirPort": 800,
"allowLoginToken": true,
"allowFraming": true,
"_agentPing": 60,
"agentPong": 300,
"allowHighQualityDesktop": true,
"tlsOffload": "127.0.0.1",
"agentCoreDump": false,
"Compression": true,
"WsCompression": true,
"AgentWsCompression": true,
"MaxInvalidLogin": { "time": 5, "count": 5, "coolofftime": 30 }
"compression": true,
"wsCompression": true,
"agentWsCompression": true,
"maxInvalidLogin": { "time": 5, "count": 5, "coolofftime": 30 }
},
"domains": {
"": {
"Title": "Tactical RMM",
"Title2": "Tactical RMM",
"NewAccounts": false,
"CertUrl": "https://${meshdomain}:443/",
"GeoLocation": true,
"CookieIpCheck": false,
"title": "Tactical RMM",
"title2": "Tactical RMM",
"newAccounts": false,
"certUrl": "https://${meshdomain}:443/",
"geoLocation": true,
"cookieIpCheck": false,
"mstsc": true
}
}
@@ -412,6 +411,7 @@ pip install --no-cache-dir -r /rmm/api/tacticalrmm/requirements.txt
python manage.py migrate
python manage.py collectstatic --no-input
python manage.py create_natsapi_conf
python manage.py create_uwsgi_conf
python manage.py load_chocos
python manage.py load_community_scripts
WEB_VERSION=$(python manage.py get_config webversion)
@@ -430,36 +430,6 @@ python manage.py generate_barcode ${RANDBASE} ${djangousername} ${frontenddomain
deactivate
read -n 1 -s -r -p "Press any key to continue..."

uwsgini="$(cat << EOF
[uwsgi]
chdir = /rmm/api/tacticalrmm
module = tacticalrmm.wsgi
home = /rmm/api/env
master = true
enable-threads = true
socket = /rmm/api/tacticalrmm/tacticalrmm.sock
harakiri = 300
chmod-socket = 660
buffer-size = 65535
vacuum = true
die-on-term = true
max-requests = 500
disable-logging = true
cheaper-algo = busyness
cheaper = 4
cheaper-initial = 4
workers = 20
cheaper-step = 2
cheaper-overload = 3
cheaper-busyness-min = 5
cheaper-busyness-max = 10
# stats = /tmp/stats.socket # uncomment when debugging
# cheaper-busyness-verbose = true # uncomment when debugging
EOF
)"
echo "${uwsgini}" > /rmm/api/tacticalrmm/app.ini


rmmservice="$(cat << EOF
[Unit]
Description=tacticalrmm uwsgi daemon
@@ -482,7 +452,7 @@ echo "${rmmservice}" | sudo tee /etc/systemd/system/rmm.service > /dev/null

daphneservice="$(cat << EOF
[Unit]
Description=django channels daemon
Description=django channels daemon v2
After=network.target

[Service]
@@ -491,6 +461,8 @@ Group=www-data
WorkingDirectory=/rmm/api/tacticalrmm
Environment="PATH=/rmm/api/env/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
ExecStart=/rmm/api/env/bin/daphne -u /rmm/daphne.sock tacticalrmm.asgi:application
ExecStartPre=rm -f /rmm/daphne.sock
ExecStartPre=rm -f /rmm/daphne.sock.lock
Restart=always
RestartSec=3s

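Across install.sh, restore.sh, update.sh, and the docker entrypoint, the inline uwsgi heredoc is deleted and replaced by a single `python manage.py create_uwsgi_conf` call that writes app.ini. The repo's actual command isn't shown in this diff; purely as a sketch of what such a Django management command could look like, with every setting copied from the removed heredoc and the class itself hypothetical:

from pathlib import Path
from django.core.management.base import BaseCommand

# values copied from the uwsgi heredoc that the shell scripts used to write by hand
UWSGI_INI = """\
[uwsgi]
chdir = /rmm/api/tacticalrmm
module = tacticalrmm.wsgi
home = /rmm/api/env
master = true
enable-threads = true
socket = /rmm/api/tacticalrmm/tacticalrmm.sock
harakiri = 300
chmod-socket = 660
buffer-size = 65535
vacuum = true
die-on-term = true
max-requests = 500
disable-logging = true
cheaper-algo = busyness
cheaper = 4
cheaper-initial = 4
workers = 20
cheaper-step = 2
cheaper-overload = 3
cheaper-busyness-min = 5
cheaper-busyness-max = 10
"""

class Command(BaseCommand):
    help = "Generate the uwsgi app.ini so the shell scripts no longer embed it"

    def handle(self, *args, **kwargs):
        target = Path("/rmm/api/tacticalrmm/app.ini")
        target.write_text(UWSGI_INI)
        self.stdout.write(self.style.SUCCESS(f"wrote {target}"))

Centralising the config in one command keeps install.sh, update.sh, restore.sh, and the container entrypoint from drifting apart whenever a uwsgi setting changes.
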
main.go (2 changed lines)
@@ -12,7 +12,7 @@ import (
)

var (
version = "3.3.0"
version = "3.3.1"
log = logrus.New()
)

Binary file not shown.
restore.sh (35 changed lines)
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

SCRIPT_VERSION="42"
SCRIPT_VERSION="43"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/restore.sh'

sudo apt update
@@ -13,7 +13,7 @@ RED='\033[0;31m'
NC='\033[0m'

SCRIPTS_DIR='/opt/trmm-community-scripts'
PYTHON_VER='3.10.6'
PYTHON_VER='3.10.8'
SETTINGS_FILE='/rmm/api/tacticalrmm/tacticalrmm/settings.py'

TMP_FILE=$(mktemp -p "" "rmmrestore_XXXXXXXXXX")
@@ -162,7 +162,6 @@ sudo rm -rf /etc/letsencrypt
sudo mkdir /etc/letsencrypt
sudo tar -xzf $tmp_dir/certs/etc-letsencrypt.tar.gz -C /etc/letsencrypt
sudo chown ${USER}:${USER} -R /etc/letsencrypt
sudo chmod 775 -R /etc/letsencrypt

print_green 'Restoring celery configs'

@@ -259,35 +258,6 @@ npm install meshcentral@${MESH_VER}

print_green 'Restoring the backend'

uwsgini="$(cat << EOF
[uwsgi]
chdir = /rmm/api/tacticalrmm
module = tacticalrmm.wsgi
home = /rmm/api/env
master = true
enable-threads = true
socket = /rmm/api/tacticalrmm/tacticalrmm.sock
harakiri = 300
chmod-socket = 660
buffer-size = 65535
vacuum = true
die-on-term = true
max-requests = 500
disable-logging = true
cheaper-algo = busyness
cheaper = 4
cheaper-initial = 4
workers = 20
cheaper-step = 2
cheaper-overload = 3
cheaper-busyness-min = 5
cheaper-busyness-max = 10
# stats = /tmp/stats.socket # uncomment when debugging
# cheaper-busyness-verbose = true # uncomment when debugging
EOF
)"
echo "${uwsgini}" > /rmm/api/tacticalrmm/app.ini

cp $tmp_dir/rmm/local_settings.py /rmm/api/tacticalrmm/tacticalrmm/
cp $tmp_dir/rmm/env /rmm/web/.env
gzip -d $tmp_dir/rmm/debug.log.gz
@@ -326,6 +296,7 @@ pip install --no-cache-dir -r /rmm/api/tacticalrmm/requirements.txt
python manage.py migrate
python manage.py collectstatic --no-input
python manage.py create_natsapi_conf
python manage.py create_uwsgi_conf
python manage.py reload_nats
python manage.py post_update_tasks
API=$(python manage.py get_config api)

update.sh (65 changed lines)
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

SCRIPT_VERSION="139"
SCRIPT_VERSION="140"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/update.sh'
LATEST_SETTINGS_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/api/tacticalrmm/tacticalrmm/settings.py'
YELLOW='\033[1;33m'
@@ -10,7 +10,7 @@ NC='\033[0m'
THIS_SCRIPT=$(readlink -f "$0")

SCRIPTS_DIR='/opt/trmm-community-scripts'
PYTHON_VER='3.10.6'
PYTHON_VER='3.10.8'
SETTINGS_FILE='/rmm/api/tacticalrmm/tacticalrmm/settings.py'

TMP_FILE=$(mktemp -p "" "rmmupdate_XXXXXXXXXX")
@@ -127,37 +127,34 @@ printf >&2 "${GREEN}Stopping ${i} service...${NC}\n"
sudo systemctl stop ${i}
done

rm -f /rmm/api/tacticalrmm/app.ini
CHECK_DAPHNE=$(grep v2 /etc/systemd/system/daphne.service)
if ! [[ $CHECK_DAPHNE ]]; then

uwsgini="$(cat << EOF
[uwsgi]
chdir = /rmm/api/tacticalrmm
module = tacticalrmm.wsgi
home = /rmm/api/env
master = true
enable-threads = true
socket = /rmm/api/tacticalrmm/tacticalrmm.sock
harakiri = 300
chmod-socket = 660
buffer-size = 65535
vacuum = true
die-on-term = true
max-requests = 500
disable-logging = true
cheaper-algo = busyness
cheaper = 4
cheaper-initial = 4
workers = 20
cheaper-step = 2
cheaper-overload = 3
cheaper-busyness-min = 5
cheaper-busyness-max = 10
# stats = /tmp/stats.socket # uncomment when debugging
# cheaper-busyness-verbose = true # uncomment when debugging
sudo rm -f /etc/systemd/system/daphne.service

daphneservice="$(cat << EOF
[Unit]
Description=django channels daemon v2
After=network.target

[Service]
User=${USER}
Group=www-data
WorkingDirectory=/rmm/api/tacticalrmm
Environment="PATH=/rmm/api/env/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
ExecStart=/rmm/api/env/bin/daphne -u /rmm/daphne.sock tacticalrmm.asgi:application
ExecStartPre=rm -f /rmm/daphne.sock
ExecStartPre=rm -f /rmm/daphne.sock.lock
Restart=always
RestartSec=3s

[Install]
WantedBy=multi-user.target
EOF
)"
echo "${uwsgini}" > /rmm/api/tacticalrmm/app.ini

echo "${daphneservice}" | sudo tee /etc/systemd/system/daphne.service > /dev/null
sudo systemctl daemon-reload
fi

if [ ! -f /etc/apt/sources.list.d/nginx.list ]; then
osname=$(lsb_release -si); osname=${osname^}
@@ -305,7 +302,6 @@ sudo chown ${USER}:${USER} -R ${SCRIPTS_DIR}
sudo chown ${USER}:${USER} /var/log/celery
sudo chown ${USER}:${USER} -R /etc/conf.d/
sudo chown ${USER}:${USER} -R /etc/letsencrypt
sudo chmod 775 -R /etc/letsencrypt

CHECK_CELERY_CONFIG=$(grep "autoscale=20,2" /etc/conf.d/celery.conf)
if ! [[ $CHECK_CELERY_CONFIG ]]; then
@@ -349,17 +345,12 @@ python manage.py reload_nats
python manage.py load_chocos
python manage.py create_installer_user
python manage.py create_natsapi_conf
python manage.py create_uwsgi_conf
python manage.py post_update_tasks
API=$(python manage.py get_config api)
WEB_VERSION=$(python manage.py get_config webversion)
deactivate

printf >&2 "${GREEN}Turning off redis aof${NC}\n"
sudo redis-cli config set appendonly no
sudo redis-cli config rewrite
sudo rm -f /var/lib/redis/appendonly.aof


if [ -d /rmm/web ]; then
rm -rf /rmm/web
fi