Compare commits
160 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 1e2a56c5e9 | |
| | 8011773af4 | |
| | ddc69c692e | |
| | df925c9744 | |
| | 1726341aad | |
| | 63b1ccc7a7 | |
| | e80397c857 | |
| | 81aa7ca1a4 | |
| | f0f7695890 | |
| | e7e8ce2f7a | |
| | ba37a3f18d | |
| | 60b11a7a5d | |
| | 29461c20a7 | |
| | 2ff1f34543 | |
| | b75d7f970f | |
| | 204681f097 | |
| | e239fe95a4 | |
| | 0a101f061a | |
| | f112a17afa | |
| | 54658a66d2 | |
| | 6b8f5a76e4 | |
| | 623a5d338d | |
| | 9c5565cfd5 | |
| | 722f2efaee | |
| | 4928264204 | |
| | 12d62ddc2a | |
| | da54e97217 | |
| | 9c0993dac8 | |
| | 175486b7c4 | |
| | 4760a287f6 | |
| | 0237b48c87 | |
| | 95c9f22e6c | |
| | 9b001219d5 | |
| | 6ff15efc7b | |
| | 6fe1dccc7e | |
| | 1c80f6f3fa | |
| | 54d3177fdd | |
| | a24ad245d2 | |
| | f38cfdcadf | |
| | 92e4ad8ccd | |
| | 3f3ab088d2 | |
| | 2c2cbaa175 | |
| | 911b6bf863 | |
| | 31462cab64 | |
| | 1ee35da62d | |
| | edf4815595 | |
| | 06ccee5d18 | |
| | d5ad85725f | |
| | 4d5bddb413 | |
| | 2f4da7c381 | |
| | 8b845fce03 | |
| | 9fd15c38a9 | |
| | ec1573d01f | |
| | 92ec1cc9e7 | |
| | 8b2f9665ce | |
| | cb388a5a78 | |
| | 7f4389ae08 | |
| | 76d71beaa2 | |
| | 31bb9c2197 | |
| | 6a2cd5c45a | |
| | 520632514b | |
| | f998b28d0b | |
| | 1a6587e9e6 | |
| | 9b4b729d19 | |
| | e80345295e | |
| | 026c259a2e | |
| | 63474c2269 | |
| | faa1a9312f | |
| | 23fa0726d5 | |
| | 22210eaf7d | |
| | dcd8bee676 | |
| | 06f0fa8f0e | |
| | 6d0f9e2cd5 | |
| | 732afdb65d | |
| | 1a9e8742f7 | |
| | b8eda37339 | |
| | 5107db6169 | |
| | 2c8f207454 | |
| | 489bc9c3b3 | |
| | 514713e883 | |
| | 17cc0cd09c | |
| | 4475df1295 | |
| | fdad267cfd | |
| | 3684fc80f0 | |
| | e97a5fef94 | |
| | de2972631f | |
| | e5b8fd67c8 | |
| | 5fade89e2d | |
| | 2eefedadb3 | |
| | e63d7a0b8a | |
| | 2a1b1849fa | |
| | 0461cb7f19 | |
| | 0932e0be03 | |
| | 4638ac9474 | |
| | d8d7255029 | |
| | fa05276c3f | |
| | e50a5d51d8 | |
| | c03ba78587 | |
| | ff07c69e7d | |
| | 735b84b26d | |
| | 8dd069ad67 | |
| | 1857e68003 | |
| | ff2508382a | |
| | 9cb952b116 | |
| | 105e8089bb | |
| | 730f37f247 | |
| | 284716751f | |
| | 8d0db699bf | |
| | 53cf1cae58 | |
| | 307e4719e0 | |
| | 5effae787a | |
| | 6532be0b52 | |
| | fb225a5347 | |
| | b83830a45e | |
| | ca28288c33 | |
| | b6f8d9cb25 | |
| | 9cad0f11e5 | |
| | 807be08566 | |
| | 67f6a985f8 | |
| | f87d54ae8d | |
| | d894bf7271 | |
| | 56e0e5cace | |
| | 685084e784 | |
| | cbeec5a973 | |
| | 3fff56bcd7 | |
| | c504c23eec | |
| | 16dae5a655 | |
| | e512c5ae7d | |
| | 094078b928 | |
| | 34fc3ff919 | |
| | 4391f48e78 | |
| | 775608a3c0 | |
| | b326228901 | |
| | b2e98173a8 | |
| | 65c9b7952c | |
| | b9dc9e7d62 | |
| | ce178d0354 | |
| | a3ff6efebc | |
| | 6a9bc56723 | |
| | c9ac158d25 | |
| | 4b937a0fe8 | |
| | 405bf26ac5 | |
| | 5dcda0e0a0 | |
| | 83e9b60308 | |
| | 10b40b4730 | |
| | 79d6d804ef | |
| | e9c7b6d8f8 | |
| | 4fcfbfb3f4 | |
| | 30cde14ed3 | |
| | cf76e6f538 | |
| | d0f600ec8d | |
| | 675f9e956f | |
| | 381605a6bb | |
| | 0fce66062b | |
| | 747cc9e5da | |
| | 25a1b464da | |
| | 3b6738b547 | |
| | fc93e3e97f | |
| | 0edbb13d48 | |
| | 673687341c | |
@@ -1,4 +1,4 @@
FROM python:3.9.6-slim
FROM python:3.9.9-slim

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
@@ -13,10 +13,6 @@ EXPOSE 8000 8383 8005
RUN groupadd -g 1000 tactical && \
    useradd -u 1000 -g 1000 tactical

# Copy nats-api file
COPY natsapi/bin/nats-api /usr/local/bin/
RUN chmod +x /usr/local/bin/nats-api

# Copy dev python reqs
COPY .devcontainer/requirements.txt /

@@ -96,6 +96,7 @@ EOF
"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
"${VIRTUAL_ENV}"/bin/python manage.py create_natsapi_conf
"${VIRTUAL_ENV}"/bin/python manage.py create_installer_user

# create super user

@@ -35,3 +35,4 @@ Pygments
mypy
pysnooper
isort
drf_spectacular
2 .gitignore vendored
@@ -49,3 +49,5 @@ nats-rmm.conf
docs/site/
reset_db.sh
run_go_cmd.py
nats-api.conf
@@ -0,0 +1,81 @@
import asyncio

from django.core.management.base import BaseCommand
from django.utils import timezone as djangotime
from packaging import version as pyver

from agents.models import Agent
from tacticalrmm.utils import AGENT_DEFER, reload_nats


class Command(BaseCommand):
    help = "Delete old agents"

    def add_arguments(self, parser):
        parser.add_argument(
            "--days",
            type=int,
            help="Delete agents that have not checked in for this many days",
        )
        parser.add_argument(
            "--agentver",
            type=str,
            help="Delete agents that equal to or less than this version",
        )
        parser.add_argument(
            "--delete",
            action="store_true",
            help="This will delete agents",
        )

    def handle(self, *args, **kwargs):
        days = kwargs["days"]
        agentver = kwargs["agentver"]
        delete = kwargs["delete"]

        if not days and not agentver:
            self.stdout.write(
                self.style.ERROR("Must have at least one parameter: days or agentver")
            )
            return

        q = Agent.objects.defer(*AGENT_DEFER)

        agents = []
        if days:
            overdue = djangotime.now() - djangotime.timedelta(days=days)
            agents = [i for i in q if i.last_seen < overdue]

        if agentver:
            agents = [i for i in q if pyver.parse(i.version) <= pyver.parse(agentver)]

        if not agents:
            self.stdout.write(self.style.ERROR("No agents matched"))
            return

        deleted_count = 0
        for agent in agents:
            s = f"{agent.hostname} | Version {agent.version} | Last Seen {agent.last_seen} | {agent.client} > {agent.site}"
            if delete:
                s = "Deleting " + s
                self.stdout.write(self.style.SUCCESS(s))
                asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
                try:
                    agent.delete()
                except Exception as e:
                    err = f"Failed to delete agent {agent.hostname}: {str(e)}"
                    self.stdout.write(self.style.ERROR(err))
                else:
                    deleted_count += 1
            else:
                self.stdout.write(self.style.WARNING(s))

        if delete:
            reload_nats()
            self.stdout.write(self.style.SUCCESS(f"Deleted {deleted_count} agents"))
        else:
            self.stdout.write(
                self.style.SUCCESS(
                    "The above agents would be deleted. Run again with --delete to actually delete them."
                )
            )
25 api/tacticalrmm/agents/management/commands/update_agents.py Normal file
@@ -0,0 +1,25 @@
from django.conf import settings
from django.core.management.base import BaseCommand
from packaging import version as pyver

from agents.models import Agent
from core.models import CoreSettings
from agents.tasks import send_agent_update_task
from tacticalrmm.utils import AGENT_DEFER


class Command(BaseCommand):
    help = "Triggers an agent update task to run"

    def handle(self, *args, **kwargs):
        core = CoreSettings.objects.first()
        if not core.agent_auto_update: # type: ignore
            return

        q = Agent.objects.defer(*AGENT_DEFER).exclude(version=settings.LATEST_AGENT_VER)
        agent_ids: list[str] = [
            i.agent_id
            for i in q
            if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
        ]
        send_agent_update_task.delay(agent_ids=agent_ids)
@@ -98,7 +98,7 @@ class Agent(BaseAuditModel):

        # check if new agent has been created
        # or check if policy have changed on agent
        # or if site has changed on agent and if so generate-policies
        # or if site has changed on agent and if so generate policies
        # or if agent was changed from server or workstation
        if (
            not old_agent
@@ -109,10 +109,6 @@ class Agent(BaseAuditModel):
        ):
            generate_agent_checks_task.delay(agents=[self.pk], create_tasks=True)

        # calculate alert template for new agents
        if not old_agent:
            self.set_alert_template()

    def __str__(self):
        return self.hostname

@@ -748,8 +744,8 @@ class Agent(BaseAuditModel):
        try:
            ret = msgpack.loads(msg.data) # type: ignore
        except Exception as e:
            DebugLog.error(agent=self, log_type="agent_issues", message=e)
            ret = str(e)
            DebugLog.error(agent=self, log_type="agent_issues", message=ret)

        await nc.close()
        return ret
@@ -38,13 +38,15 @@ class AgentSerializer(serializers.ModelSerializer):
    client = serializers.ReadOnlyField(source="client.name")
    site_name = serializers.ReadOnlyField(source="site.name")
    custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)
    patches_last_installed = serializers.ReadOnlyField()
    last_seen = serializers.ReadOnlyField()

    def get_all_timezones(self, obj):
        return pytz.all_timezones

    class Meta:
        model = Agent
        exclude = ["last_seen", "id", "patches_last_installed"]
        exclude = ["id"]


class AgentTableSerializer(serializers.ModelSerializer):
@@ -12,10 +12,10 @@ from logs.models import DebugLog, PendingAction
from packaging import version as pyver
from scripts.models import Script
from tacticalrmm.celery import app
from tacticalrmm.utils import run_nats_api_cmd

from agents.models import Agent
from agents.utils import get_winagent_url
from tacticalrmm.utils import AGENT_DEFER


def agent_update(agent_id: str, force: bool = False) -> str:
@@ -80,7 +80,7 @@ def force_code_sign(agent_ids: list[str]) -> None:

@app.task
def send_agent_update_task(agent_ids: list[str]) -> None:
    chunks = (agent_ids[i : i + 30] for i in range(0, len(agent_ids), 30))
    chunks = (agent_ids[i : i + 50] for i in range(0, len(agent_ids), 50))
    for chunk in chunks:
        for agent_id in chunk:
            agent_update(agent_id)
@@ -268,7 +268,7 @@ def run_script_email_results_task(
            server.send_message(msg)
            server.quit()
        except Exception as e:
            DebugLog.error(message=e)
            DebugLog.error(message=str(e))


@app.task
@@ -299,25 +299,6 @@ def clear_faults_task(older_than_days: int) -> None:
    )


@app.task
def get_wmi_task() -> None:
    agents = Agent.objects.only(
        "pk", "agent_id", "last_seen", "overdue_time", "offline_time"
    )
    ids = [i.agent_id for i in agents if i.status == "online"]
    run_nats_api_cmd("wmi", ids, timeout=45)


@app.task
def agent_checkin_task() -> None:
    run_nats_api_cmd("checkin", timeout=30)


@app.task
def agent_getinfo_task() -> None:
    run_nats_api_cmd("agentinfo", timeout=30)


@app.task
def prune_agent_history(older_than_days: int) -> str:
    from .models import AgentHistory
@@ -331,9 +312,7 @@ def prune_agent_history(older_than_days: int) -> str:

@app.task
def handle_agents_task() -> None:
    q = Agent.objects.prefetch_related("pendingactions", "autotasks").only(
        "pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
    )
    q = Agent.objects.defer(*AGENT_DEFER)
    agents = [
        i
        for i in q
@@ -20,7 +20,12 @@ from core.models import CoreSettings
from logs.models import AuditLog, DebugLog, PendingAction
from scripts.models import Script
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
from tacticalrmm.utils import (
    get_default_timezone,
    notify_error,
    reload_nats,
    AGENT_DEFER,
)
from winupdate.serializers import WinUpdatePolicySerializer
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
from tacticalrmm.permissions import (
@@ -74,34 +79,13 @@ class GetAgents(APIView):
            or "detail" in request.query_params.keys()
            and request.query_params["detail"] == "true"
        ):
            agents = (
                Agent.objects.filter_by_role(request.user)
                Agent.objects.filter_by_role(request.user) # type: ignore
                .select_related("site", "policy", "alert_template")
                .prefetch_related("agentchecks")
                .filter(filter)
                .only(
                    "pk",
                    "hostname",
                    "agent_id",
                    "site",
                    "policy",
                    "alert_template",
                    "monitoring_type",
                    "description",
                    "needs_reboot",
                    "overdue_text_alert",
                    "overdue_email_alert",
                    "overdue_time",
                    "offline_time",
                    "last_seen",
                    "boot_time",
                    "logged_in_username",
                    "last_logged_in_user",
                    "time_zone",
                    "maintenance_mode",
                    "pending_actions_count",
                    "has_patches_pending",
                )
                .defer(*AGENT_DEFER)
            )
            ctx = {"default_tz": get_default_timezone()}
            serializer = AgentTableSerializer(agents, many=True, context=ctx)
@@ -109,7 +93,7 @@ class GetAgents(APIView):
        # if detail=false
        else:
            agents = (
                Agent.objects.filter_by_role(request.user)
                Agent.objects.filter_by_role(request.user) # type: ignore
                .select_related("site")
                .filter(filter)
                .only("agent_id", "hostname", "site")
@@ -125,9 +109,7 @@ class GetUpdateDeleteAgent(APIView):
    # get agent details
    def get(self, request, agent_id):
        agent = get_object_or_404(Agent, agent_id=agent_id)
        return Response(
            AgentSerializer(agent, context={"default_tz": get_default_timezone()}).data
        )
        return Response(AgentSerializer(agent).data)

    # edit agent
    def put(self, request, agent_id):
@@ -456,7 +456,8 @@ class Alert(models.Model):
            if match:
                name = match.group(1)

                if hasattr(self, name):
                # check if attr exists and isn't a function
                if hasattr(self, name) and not callable(getattr(self, name)):
                    value = f"'{getattr(self, name)}'"
                else:
                    continue
@@ -464,7 +465,7 @@ class Alert(models.Model):
                try:
                    temp_args.append(re.sub("\\{\\{.*\\}\\}", value, arg)) # type: ignore
                except Exception as e:
                    DebugLog.error(log_type="scripting", message=e)
                    DebugLog.error(log_type="scripting", message=str(e))
                    continue

            else:
@@ -9,6 +9,7 @@ from model_bakery import baker, seq
from tacticalrmm.test import TacticalTestCase

from alerts.tasks import cache_agents_alert_template
from agents.tasks import handle_agents_task

from .models import Alert, AlertTemplate
from .serializers import (
@@ -676,25 +677,14 @@ class TestAlertTasks(TacticalTestCase):
        url = "/api/v3/checkin/"

        agent_template_text.version = settings.LATEST_AGENT_VER
        agent_template_text.last_seen = djangotime.now()
        agent_template_text.save()

        agent_template_email.version = settings.LATEST_AGENT_VER
        agent_template_email.last_seen = djangotime.now()
        agent_template_email.save()

        data = {
            "agent_id": agent_template_text.agent_id,
            "version": settings.LATEST_AGENT_VER,
        }

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        data = {
            "agent_id": agent_template_email.agent_id,
            "version": settings.LATEST_AGENT_VER,
        }

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)
        handle_agents_task()

        recovery_sms.assert_called_with(
            pk=Alert.objects.get(agent=agent_template_text).pk
@@ -1365,15 +1355,7 @@ class TestAlertTasks(TacticalTestCase):
        agent.last_seen = djangotime.now()
        agent.save()

        url = "/api/v3/checkin/"

        data = {
            "agent_id": agent.agent_id,
            "version": settings.LATEST_AGENT_VER,
        }

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)
        handle_agents_task()

        # this is what data should be
        data = {
@@ -130,42 +130,6 @@ class TestAPIv3(TacticalTestCase):
        self.assertIsInstance(r.json()["check_interval"], int)
        self.assertEqual(len(r.json()["checks"]), 15)

    def test_checkin_patch(self):
        from logs.models import PendingAction

        url = "/api/v3/checkin/"
        agent_updated = baker.make_recipe("agents.agent", version="1.3.0")
        PendingAction.objects.create(
            agent=agent_updated,
            action_type="agentupdate",
            details={
                "url": agent_updated.winagent_dl,
                "version": agent_updated.version,
                "inno": agent_updated.win_inno_exe,
            },
        )
        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
        self.assertEqual(action.status, "pending")

        # test agent failed to update and still on same version
        payload = {
            "func": "hello",
            "agent_id": agent_updated.agent_id,
            "version": "1.3.0",
        }
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
        self.assertEqual(action.status, "pending")

        # test agent successful update
        payload["version"] = settings.LATEST_AGENT_VER
        r = self.client.patch(url, payload, format="json")
        self.assertEqual(r.status_code, 200)
        action = agent_updated.pendingactions.filter(action_type="agentupdate").first()
        self.assertEqual(action.status, "completed")
        action.delete()

    @patch("apiv3.views.reload_nats")
    def test_agent_recovery(self, reload_nats):
        reload_nats.return_value = "ok"
@@ -23,7 +23,7 @@ from checks.serializers import CheckRunnerGetSerializer
from checks.utils import bytes2human
from logs.models import PendingAction, DebugLog
from software.models import InstalledSoftware
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
from tacticalrmm.utils import notify_error, reload_nats
from winupdate.models import WinUpdate, WinUpdatePolicy


@@ -32,55 +32,11 @@ class CheckIn(APIView):

    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def patch(self, request):
    def put(self, request):
        """
        !!! DEPRECATED AS OF AGENT 1.6.0 !!!
        !!! DEPRECATED AS OF AGENT 1.7.0 !!!
        Endpoint be removed in a future release
        """
        from alerts.models import Alert

        updated = False
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        if pyver.parse(request.data["version"]) > pyver.parse(
            agent.version
        ) or pyver.parse(request.data["version"]) == pyver.parse(
            settings.LATEST_AGENT_VER
        ):
            updated = True
        agent.version = request.data["version"]
        agent.last_seen = djangotime.now()
        agent.save(update_fields=["version", "last_seen"])

        # change agent update pending status to completed if agent has just updated
        if (
            updated
            and agent.pendingactions.filter( # type: ignore
                action_type="agentupdate", status="pending"
            ).exists()
        ):
            agent.pendingactions.filter( # type: ignore
                action_type="agentupdate", status="pending"
            ).update(status="completed")

        # handles any alerting actions
        if Alert.objects.filter(agent=agent, resolved=False).exists():
            Alert.handle_alert_resolve(agent)

        # sync scheduled tasks
        if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
            tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore

            for task in tasks:
                if task.sync_status == "pendingdeletion":
                    task.delete_task_on_agent()
                elif task.sync_status == "initial":
                    task.modify_task_on_agent()
                elif task.sync_status == "notsynced":
                    task.create_task_on_agent()

        return Response("ok")

    def put(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
@@ -109,11 +65,8 @@ class CheckIn(APIView):
            return Response("ok")

        if request.data["func"] == "software":
            raw: SoftwareList = request.data["software"]
            if not isinstance(raw, list):
                return notify_error("err")
            sw = request.data["software"]

            sw = filter_software(raw)
            if not InstalledSoftware.objects.filter(agent=agent).exists():
                InstalledSoftware(agent=agent, software=sw).save()
            else:
@@ -168,18 +121,18 @@ class WinUpdates(APIView):

    def put(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])

        needs_reboot: bool = request.data["needs_reboot"]
        agent.needs_reboot = needs_reboot
        agent.save(update_fields=["needs_reboot"])

        reboot_policy: str = agent.get_patch_policy().reboot_after_install
        reboot = False

        if reboot_policy == "always":
            reboot = True

        if request.data["needs_reboot"]:
            if reboot_policy == "required":
                reboot = True
            elif reboot_policy == "never":
                agent.needs_reboot = True
                agent.save(update_fields=["needs_reboot"])
        elif needs_reboot and reboot_policy == "required":
            reboot = True

        if reboot:
            asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
@@ -371,6 +324,13 @@ class TaskRunner(APIView):
        serializer.is_valid(raise_exception=True)
        new_task = serializer.save(last_run=djangotime.now())

        AgentHistory.objects.create(
            agent=agent,
            type="task_run",
            script=task.script,
            script_results=request.data,
        )

        # check if task is a collector and update the custom field
        if task.custom_field:
            if not task.stderr:
@@ -500,11 +460,7 @@ class Software(APIView):

    def post(self, request):
        agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
        raw: SoftwareList = request.data["software"]
        if not isinstance(raw, list):
            return notify_error("err")

        sw = filter_software(raw)
        sw = request.data["software"]
        if not InstalledSoftware.objects.filter(agent=agent).exists():
            InstalledSoftware(agent=agent, software=sw).save()
        else:
@@ -570,7 +526,18 @@ class AgentRecovery(APIView):
    permission_classes = [IsAuthenticated]

    def get(self, request, agentid):
        agent = get_object_or_404(Agent, agent_id=agentid)
        agent = get_object_or_404(
            Agent.objects.prefetch_related("recoveryactions").only(
                "pk", "agent_id", "last_seen"
            ),
            agent_id=agentid,
        )

        # TODO remove these 2 lines after agent v1.7.0 has been out for a while
        # this is handled now by nats-api service
        agent.last_seen = djangotime.now()
        agent.save(update_fields=["last_seen"])

        recovery = agent.recoveryactions.filter(last_run=None).last() # type: ignore
        ret = {"mode": "pass", "shellcmd": ""}
        if recovery is None:
@@ -54,6 +54,8 @@ def generate_agent_checks_task(
        if create_tasks:
            agent.generate_tasks_from_policies()

        agent.set_alert_template()

    return "ok"
@@ -654,3 +654,9 @@ class TestTaskPermissions(TacticalTestCase):
        self.check_authorized("post", url)
        self.check_not_authorized("post", unauthorized_url)

    def test_policy_fields_to_copy_exists(self):
        fields = [i.name for i in AutomatedTask._meta.get_fields()]
        task = baker.make("autotasks.AutomatedTask")
        for i in task.policy_fields_to_copy: # type: ignore
            self.assertIn(i, fields)
@@ -1096,3 +1096,12 @@ class TestCheckPermissions(TacticalTestCase):
        self.check_authorized("patch", url)
        self.check_not_authorized("patch", unauthorized_url)

    def test_policy_fields_to_copy_exists(self):
        from .models import Check

        fields = [i.name for i in Check._meta.get_fields()]
        check = baker.make("checks.Check")

        for i in check.policy_fields_to_copy: # type: ignore
            self.assertIn(i, fields)
@@ -6,6 +6,7 @@ from django.db import models
from agents.models import Agent
from logs.models import BaseAuditModel
from tacticalrmm.models import PermissionQuerySet
from tacticalrmm.utils import AGENT_DEFER


class Client(BaseAuditModel):
@@ -73,29 +74,20 @@ class Client(BaseAuditModel):
    @property
    def agent_count(self) -> int:
        return Agent.objects.filter(site__client=self).count()
        return Agent.objects.defer(*AGENT_DEFER).filter(site__client=self).count()

    @property
    def has_maintenanace_mode_agents(self):
        return (
            Agent.objects.filter(site__client=self, maintenance_mode=True).count() > 0
            Agent.objects.defer(*AGENT_DEFER)
            .filter(site__client=self, maintenance_mode=True)
            .count()
            > 0
        )

    @property
    def has_failing_checks(self):
        agents = (
            Agent.objects.only(
                "pk",
                "overdue_email_alert",
                "overdue_text_alert",
                "last_seen",
                "overdue_time",
                "offline_time",
            )
            .filter(site__client=self)
            .prefetch_related("agentchecks", "autotasks")
        )

        agents = Agent.objects.defer(*AGENT_DEFER).filter(site__client=self)
        data = {"error": False, "warning": False}

        for agent in agents:
@@ -194,23 +186,21 @@ class Site(BaseAuditModel):
    @property
    def agent_count(self) -> int:
        return Agent.objects.filter(site=self).count()
        return Agent.objects.defer(*AGENT_DEFER).filter(site=self).count()

    @property
    def has_maintenanace_mode_agents(self):
        return Agent.objects.filter(site=self, maintenance_mode=True).count() > 0
        return (
            Agent.objects.defer(*AGENT_DEFER)
            .filter(site=self, maintenance_mode=True)
            .count()
            > 0
        )

    @property
    def has_failing_checks(self):
        agents = (
            Agent.objects.only(
                "pk",
                "overdue_email_alert",
                "overdue_text_alert",
                "last_seen",
                "overdue_time",
                "offline_time",
            )
            Agent.objects.defer(*AGENT_DEFER)
            .filter(site=self)
            .prefetch_related("agentchecks", "autotasks")
        )
@@ -0,0 +1,24 @@
import os
import json

from django.core.management.base import BaseCommand
from django.conf import settings


class Command(BaseCommand):
    help = "Generate conf for nats-api"

    def handle(self, *args, **kwargs):
        db = settings.DATABASES["default"]
        config = {
            "key": settings.SECRET_KEY,
            "natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
            "user": db["USER"],
            "pass": db["PASSWORD"],
            "host": db["HOST"],
            "port": int(db["PORT"]),
            "dbname": db["NAME"],
        }
        conf = os.path.join(settings.BASE_DIR, "nats-api.conf")
        with open(conf, "w") as f:
            json.dump(config, f)
@@ -1,3 +1,4 @@
import base64
from django.core.management.base import BaseCommand

from logs.models import PendingAction
@@ -20,3 +21,15 @@ class Command(BaseCommand):
        for user in User.objects.filter(is_installer_user=True):
            user.block_dashboard_login = True
            user.save()

        # convert script base64 field to text field
        user_scripts = Script.objects.exclude(script_type="builtin").filter(
            script_body=""
        )
        for script in user_scripts:
            # decode base64 string
            script.script_body = base64.b64decode(
                script.code_base64.encode("ascii", "ignore")
            ).decode("ascii", "ignore")
            # script.hash_script_body() # also saves script
            script.save(update_fields=["script_body"])
@@ -119,7 +119,6 @@ class CoreSettings(BaseAuditModel):
    def sms_is_configured(self):
        return all(
            [
                self.sms_alert_recipients,
                self.twilio_auth_token,
                self.twilio_account_sid,
                self.twilio_number,
@@ -131,7 +130,6 @@ class CoreSettings(BaseAuditModel):
        # smtp with username/password authentication
        if (
            self.smtp_requires_auth
            and self.email_alert_recipients
            and self.smtp_from_email
            and self.smtp_host
            and self.smtp_host_user
@@ -142,7 +140,6 @@ class CoreSettings(BaseAuditModel):
        # smtp relay
        elif (
            not self.smtp_requires_auth
            and self.email_alert_recipients
            and self.smtp_from_email
            and self.smtp_host
            and self.smtp_port
@@ -9,6 +9,7 @@ from alerts.tasks import prune_resolved_alerts
from core.models import CoreSettings
from logs.tasks import prune_debug_log, prune_audit_log
from tacticalrmm.celery import app
from tacticalrmm.utils import AGENT_DEFER


@app.task
@@ -58,9 +59,7 @@ def core_maintenance_tasks():
def cache_db_fields_task():
    from agents.models import Agent

    for agent in Agent.objects.prefetch_related("winupdates", "pendingactions").only(
        "pending_actions_count", "has_patches_pending", "pk"
    ):
    for agent in Agent.objects.defer(*AGENT_DEFER):
        agent.pending_actions_count = agent.pendingactions.filter(
            status="pending"
        ).count()
@@ -98,7 +98,7 @@ def dashboard_info(request):
            "client_tree_splitter": request.user.client_tree_splitter,
            "loading_bar_color": request.user.loading_bar_color,
            "clear_search_when_switching": request.user.clear_search_when_switching,
            "hosted": hasattr(settings, "HOSTED") and settings.HOSTED,
            "hosted": getattr(settings, "HOSTED", False),
        }
    )
@@ -9,7 +9,7 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.exceptions import PermissionDenied
from tacticalrmm.utils import notify_error, get_default_timezone
from tacticalrmm.utils import notify_error, get_default_timezone, AGENT_DEFER
from tacticalrmm.permissions import _audit_log_filter, _has_perm_on_agent

from .models import AuditLog, PendingAction, DebugLog
@@ -93,10 +93,16 @@ class PendingActions(APIView):
    def get(self, request, agent_id=None):
        if agent_id:
            agent = get_object_or_404(Agent, agent_id=agent_id)
            agent = get_object_or_404(
                Agent.objects.defer(*AGENT_DEFER), agent_id=agent_id
            )
            actions = PendingAction.objects.filter(agent=agent)
        else:
            actions = PendingAction.objects.filter_by_role(request.user)
            actions = (
                PendingAction.objects.select_related("agent")
                .defer("agent__services", "agent__wmi_detail")
                .filter_by_role(request.user) # type: ignore
            )

        return Response(PendingActionSerializer(actions, many=True).data)
@@ -8,4 +8,3 @@ Pygments
isort
mypy
types-pytz
types-pytz
@@ -1,12 +1,12 @@
asgiref==3.4.1
asyncio-nats-client==0.11.4
celery==5.1.2
asyncio-nats-client==0.11.5
celery==5.2.1
certifi==2021.10.8
cffi==1.15.0
channels==3.0.4
channels_redis==3.3.1
chardet==4.0.0
cryptography==3.4.8
cryptography==35.0.0
daphne==3.0.2
Django==3.2.9
django-cors-headers==3.10.0
@@ -15,9 +15,9 @@ django-rest-knox==4.1.0
djangorestframework==3.12.4
future==0.18.2
loguru==0.5.3
msgpack==1.0.2
packaging==21.2
psycopg2-binary==2.9.1
msgpack==1.0.3
packaging==21.3
psycopg2-binary==2.9.2
pycparser==2.21
pycryptodome==3.11.0
pyotp==2.6.0
@@ -28,10 +28,11 @@ redis==3.5.3
requests==2.26.0
six==1.16.0
sqlparse==0.4.2
twilio==7.3.0
twilio==7.3.1
urllib3==1.26.7
uWSGI==2.0.20
validators==0.18.2
vine==5.0.0
websockets==9.1
zipp==3.6.0
drf_spectacular==0.21.0
@@ -9,6 +9,16 @@
|
||||
"category": "TRMM (Win):Browsers",
|
||||
"default_timeout": "300"
|
||||
},
|
||||
{
|
||||
"guid": "720edbb7-8faf-4a77-9283-29935e8880d0",
|
||||
"filename": "Win_Printer_ClearandRestart.bat",
|
||||
"submittedBy": "https://github.com/wh1te909",
|
||||
"name": "Printers - Clear all print jobs",
|
||||
"description": "This script will stop the spooler, delete all pending print jobs and restart the spooler",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Printing",
|
||||
"default_timeout": "300"
|
||||
},
|
||||
{
|
||||
"guid": "3ff6a386-11d1-4f9d-8cca-1b0563bb6443",
|
||||
"filename": "Win_Google_Chrome_Clear_Cache.ps1",
|
||||
@@ -19,6 +29,16 @@
|
||||
"category": "TRMM (Win):Browsers",
|
||||
"default_timeout": "300"
|
||||
},
|
||||
{
|
||||
"guid": "d3c74105-d1e5-40d8-94ff-b4d6b216fe0f",
|
||||
"filename": "Win_Chocolatey_List_Installed.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Chocolatey - List Installed apps",
|
||||
"description": "Lists apps locally installed by chocolatey",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "be1de837-f677-4ac5-aa0c-37a0fc9991fc",
|
||||
"filename": "Win_Install_Adobe_Reader.ps1",
|
||||
@@ -48,6 +68,16 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "5a60c13b-1882-4a92-bdfb-6dd1f6a11dd14",
|
||||
"filename": "Win_Windows_Update_RevertToDefault.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Windows Update - Re-enable Microsoft managed Windows Update",
|
||||
"description": "TRMM agent will set registry key to disable Windows Auto Updates. This will re-enable Windows standard update settings",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "81cc5bcb-01bf-4b0c-89b9-0ac0f3fe0c04",
|
||||
"filename": "Win_Windows_Update_Reset.ps1",
|
||||
@@ -63,7 +93,7 @@
|
||||
"filename": "Win_Start_Cleanup.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Disk - Cleanup C: drive",
|
||||
"description": "Cleans the C: drive's Window Temperary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"description": "Cleans the C: drive's Window Temporary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Maintenance",
|
||||
"default_timeout": "25000"
|
||||
@@ -102,9 +132,7 @@
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "TacticalRMM - Agent Rename",
|
||||
"description": "Updates the DisplayName registry entry for the Tactical RMM windows agent to your desired name. This script takes 1 required argument: the name you wish to set.",
|
||||
"args": [
|
||||
"<string>"
|
||||
],
|
||||
"syntax": "<string>",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):TacticalRMM Related"
|
||||
},
|
||||
@@ -114,9 +142,7 @@
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Bitlocker - Check Drive for Status",
|
||||
"description": "Runs a check on drive for Bitlocker status. Returns 0 if Bitlocker is not enabled, 1 if Bitlocker is enabled",
|
||||
"args": [
|
||||
"[Drive <string>]"
|
||||
],
|
||||
"syntax": "[Drive <string>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
@@ -147,6 +173,15 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "11be7136-0416-47b4-a6dd-9776fa857dca",
|
||||
"filename": "Win_Storage_CheckPools.ps1",
|
||||
"submittedBy": "https://github.com/wh1te909",
|
||||
"name": "Storage Pools - Check Health",
|
||||
"description": "Checks all storage pools for health, returns error 1 if unhealthy",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "cfa14c28-4dfc-4d4e-95ee-a380652e058d",
|
||||
"filename": "Win_Bios_Check.ps1",
|
||||
@@ -188,19 +223,31 @@
|
||||
"filename": "Win_Screenconnect_GetGUID.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Screenconnect - Get GUID for client",
|
||||
"description": "Returns Screenconnect GUID for client - Use with Custom Fields for later use. ",
|
||||
"description": "Returns Screenconnect GUID for client - Use with Custom Fields for later use.",
|
||||
"args": [
|
||||
"{{client.ScreenConnectService}}"
|
||||
],
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "bbe5645f-c8d8-4d86-bddd-c8dbea45c974",
|
||||
"filename": "Win_Splashtop_Get_ID.ps1",
|
||||
"submittedBy": "https://github.com/r3die",
|
||||
"name": "Splashtop - Get SUUID for client",
|
||||
"description": "Returns Splashtop SUUID for client - Use with Custom Fields for later use.",
|
||||
"args": [
|
||||
"{{agent.SplashtopSUUID}}"
|
||||
],
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
{
|
||||
"guid": "9cfdfe8f-82bf-4081-a59f-576d694f4649",
|
||||
"filename": "Win_Teamviewer_Get_ID.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "TeamViewer - Get ClientID for client",
|
||||
"description": "Returns Teamviwer ClientID for client - Use with Custom Fields for later use. ",
|
||||
"description": "Returns Teamviwer ClientID for client - Use with Custom Fields for later use.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
@@ -209,7 +256,7 @@
|
||||
"filename": "Win_AnyDesk_Get_Anynet_ID.ps1",
|
||||
"submittedBy": "https://github.com/meuchels",
|
||||
"name": "AnyDesk - Get AnyNetID for client",
|
||||
"description": "Returns AnyNetID for client - Use with Custom Fields for later use. ",
|
||||
"description": "Returns AnyNetID for client - Use with Custom Fields for later use.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Collectors"
|
||||
},
|
||||
@@ -241,21 +288,43 @@
|
||||
"category": "TRMM (Win):Updates",
|
||||
"default_timeout": "25000"
|
||||
},
|
||||
{
|
||||
"guid": "4d0ba685-2259-44be-9010-8ed2fa48bf74",
|
||||
"filename": "Win_Win11_Ready.ps1",
|
||||
"submittedBy": "https://github.com/adamjrberry/",
|
||||
"name": "Windows 11 Upgrade capable check",
|
||||
"description": "Checks to see if machine is Win11 capable",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates",
|
||||
"default_timeout": "3600"
|
||||
},
|
||||
{
|
||||
"guid": "375323e5-cac6-4f35-a304-bb7cef35902d",
|
||||
"filename": "Win_Disk_Status.ps1",
|
||||
"filename": "Win_Disk_Volume_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Disk Hardware Health Check (using Event Viewer errors)",
|
||||
"description": "Checks local disks for errors reported in event viewer within the last 24 hours",
|
||||
"name": "Disk Drive Volume Health Check (using Event Viewer errors)",
|
||||
"description": "Checks Drive Volumes for errors reported in event viewer within the last 24 hours",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "4ace28ee-98f7-4931-9ac9-0adaf1a757ed",
|
||||
"filename": "Win_Software_Install_Report.ps1",
|
||||
"submittedBy": "https://github.com/silversword",
|
||||
"name": "Software Install - Reports new installs",
|
||||
"description": "This will check for software install events in the application Event Viewer log. If a number is provided as a command parameter it will search that number of days back.",
|
||||
"syntax": "[<int>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "907652a5-9ec1-4759-9871-a7743f805ff2",
|
||||
"filename": "Win_Software_Uninstall.ps1",
|
||||
"submittedBy": "https://github.com/subzdev",
|
||||
"name": "Software Uninstaller - list, find, and uninstall most software",
|
||||
"description": "Allows listing, finding and uninstalling most software on Windows. There will be a best effort to uninstall silently if the silent uninstall string is not provided.",
|
||||
"syntax": "-list <string>\n[-u <uninstall string>]\n[-u quiet <uninstall string>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software",
|
||||
"default_timeout": "600"
|
||||
@@ -266,6 +335,7 @@
|
||||
"submittedBy": "https://github.com/jhtechIL/",
|
||||
"name": "BitDefender Gravity Zone Install",
|
||||
"description": "Installs BitDefender Gravity Zone, requires client custom field setup. See script comments for details",
|
||||
"syntax": "[-log]",
|
||||
"args": [
|
||||
"-url {{client.bdurl}}",
|
||||
"-exe {{client.bdexe}}"
|
||||
@@ -278,6 +348,7 @@
|
||||
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
|
||||
"filename": "Win_Defender_Enable.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"syntax": "[-NoControlledFolders]",
|
||||
"name": "Defender - Enable",
|
||||
"description": "Enables Windows Defender and sets preferences",
|
||||
"shell": "powershell",
|
||||
@@ -368,6 +439,7 @@
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender - Status Report",
|
||||
"description": "This will check for Malware and Antispyware within the last 24 hours and display, otherwise will report as Healthy. Command Parameter: (number) if provided will check that number of days back in the log.",
|
||||
"syntax": "[<int>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
@@ -403,6 +475,7 @@
|
||||
"filename": "Win_Display_Message_To_User.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Message Popup To User",
|
||||
"syntax": "<string>",
|
||||
"description": "Displays a popup message to the currently logged on user",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
@@ -412,6 +485,7 @@
|
||||
"filename": "Win_Antivirus_Verify.ps1",
|
||||
"submittedBy": "https://github.com/beejayzed",
|
||||
"name": "Antivirus - Verify Status",
|
||||
"syntax": "[-antivirusName <string>]",
|
||||
"description": "Verify and display status for all installed Antiviruses",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
@@ -431,11 +505,7 @@
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Chocolatey - Install, Uninstall and Upgrade Software",
|
||||
"description": "This script installs, uninstalls and updates software using Chocolatey with logic to slow tasks to minimize hitting community limits. Mode install/uninstall/upgrade Hosts x",
|
||||
"args": [
|
||||
"-$PackageName <string>",
|
||||
"[-Hosts <string>]",
|
||||
"[-mode {(install) | update | uninstall}]"
|
||||
],
|
||||
"syntax": "-$PackageName <string>\n[-Hosts <string>]\n[-mode {(install) | update | uninstall}]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey",
|
||||
"default_timeout": "600"
|
||||
@@ -460,10 +530,11 @@
|
||||
},
|
||||
{
|
||||
"guid": "71090fc4-faa6-460b-adb0-95d7863544e1",
|
||||
"filename": "Win_Check_Events_for_Bluescreens.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"filename": "Win_Bluescreen_Report.ps1",
|
||||
"submittedBy": "https://github.com/bbrendon",
|
||||
"name": "Event Viewer - Bluescreen Notification",
|
||||
"description": "Event Viewer Monitor - Notify Bluescreen events on your system",
|
||||
"syntax": "[<int>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
@@ -472,7 +543,8 @@
|
||||
"filename": "Win_Local_User_Created_Monitor.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - New User Notification",
|
||||
"description": "Event Viewer Monitor - Notify when new Local user is created",
|
||||
"description": "Event Viewer Monitor - Notify when new Local user is created. If parameter provided will search back that number of days",
|
||||
"syntax": "[<int>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
@@ -482,6 +554,7 @@
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - Task Scheduler New Item Notification",
|
||||
"description": "Event Viewer Monitor - Notify when new Task Scheduler item is created",
|
||||
"syntax": "[<int>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
@@ -500,12 +573,7 @@
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Rename Computer",
|
||||
"description": "Rename computer. First parameter will be new PC name. 2nd parameter if yes will auto-reboot machine",
|
||||
"args": [
|
||||
"-NewName <string>",
|
||||
"[-Username <string>]",
|
||||
"[-Password <string>]",
|
||||
"[-Restart]"
|
||||
],
|
||||
"syntax": "-NewName <string>\n[-Username <string>]\n[-Password <string>]\n[-Restart]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": 30
|
||||
@@ -516,9 +584,7 @@
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Power - Restart or Shutdown PC",
|
||||
"description": "Restart PC. Add parameter: shutdown if you want to shutdown computer",
|
||||
"args": [
|
||||
"[shutdown]"
|
||||
],
|
||||
"syntax": "[shutdown]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
@@ -697,6 +763,15 @@
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security"
|
||||
},
|
||||
{
|
||||
"guid": "43a3206d-f1cb-44ef-8405-aae4d33a0bad",
|
||||
"filename": "Win_Security_Audit.ps1",
|
||||
"submittedBy": "theinterwebs",
|
||||
"name": "Windows Security - Security Audit",
|
||||
"description": "Runs an Audit on many components of windows to check for security issues",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security"
|
||||
},
|
||||
{
|
||||
"guid": "7ea6a11a-05c0-4151-b5c1-cb8af029299f",
|
||||
"filename": "Win_AzureAD_Check_Connection_Status.ps1",
|
||||
@@ -757,13 +832,17 @@
|
||||
"submittedBy": "https://github.com/brodur",
|
||||
"name": "User - Create Local",
|
||||
"description": "Create a local user. Parameters are: username, password and optional: description, fullname, group (adds to Users if not specified)",
|
||||
"args": [
|
||||
"-username <string>",
|
||||
"-password <string>",
|
||||
"[-description <string>]",
|
||||
"[-fullname <string>]",
|
||||
"[-group <string>]"
|
||||
],
|
||||
"syntax": "-username <string>\n-password <string>\n[-description <string>]\n[-fullname <string>]\n[-group <string>]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):User Management"
|
||||
},
|
||||
{
|
||||
"guid": "6e27d5341-88fa-4c2f-9c91-c3aeb1740e85",
|
||||
"filename": "Win_User_EnableDisable.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "User - Enable or disable a user",
|
||||
"description": "Used to enable or disable local user",
|
||||
"syntax": "-Name <string>\n-Enabled { yes | no }",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):User Management"
|
||||
},
|
||||
@@ -810,6 +889,7 @@
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "EXAMPLE File Copying using powershell",
|
||||
"description": "Reference Script: Will need manual tweaking, for copying files/folders from paths/websites to local",
|
||||
"syntax": "-source <string>\n-destination <string>\n[-recursive {True | False}]",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Misc>Reference",
|
||||
"default_timeout": "1"
|
||||
@@ -829,6 +909,7 @@
|
||||
"filename": "Win_AD_Join_Computer.ps1",
|
||||
"submittedBy": "https://github.com/rfost52",
|
||||
"name": "AD - Join Computer to Domain",
|
||||
"syntax": "-domain <string>\n-password <string>\n-UserAccount ADMINaccount\n[-OUPath <OU=testOU,DC=test,DC=local>]",
|
||||
"description": "Join computer to a domain in Active Directory",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Active Directory",
|
||||
@@ -839,6 +920,7 @@
|
||||
"filename": "Win_Collect_System_Report_And_Email.ps1",
|
||||
"submittedBy": "https://github.com/rfost52",
|
||||
"name": "Collect System Report and Email",
|
||||
"syntax": "-agentname <string>\n-file <string enter file name with the extension .HTM or .HTML>\n-fromaddress <string>\n-toaddress <string>\n-smtpserver <string>\n-password <string>\n-port <int 587 is the standard port for sending mail over TLS>",
|
||||
"description": "Generates a system report in HTML format, then emails it",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
|
||||
18 api/tacticalrmm/scripts/migrations/0013_script_syntax.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 3.2.6 on 2021-11-13 16:25

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('scripts', '0012_auto_20210917_1954'),
    ]

    operations = [
        migrations.AddField(
            model_name='script',
            name='syntax',
            field=models.TextField(blank=True, null=True),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2.6 on 2021-11-19 15:44

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('scripts', '0013_script_syntax'),
    ]

    operations = [
        migrations.AlterField(
            model_name='script',
            name='filename',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
@@ -0,0 +1,28 @@
# Generated by Django 3.2.9 on 2021-11-28 16:37

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('scripts', '0014_alter_script_filename'),
    ]

    operations = [
        migrations.AddField(
            model_name='script',
            name='script_body',
            field=models.TextField(blank=True, default=''),
        ),
        migrations.AddField(
            model_name='script',
            name='script_hash',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='script',
            name='code_base64',
            field=models.TextField(blank=True, default=''),
        ),
    ]
@@ -1,5 +1,6 @@
|
||||
import base64
|
||||
import re
|
||||
import hmac
|
||||
import hashlib
|
||||
from typing import List
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
@@ -24,7 +25,7 @@ class Script(BaseAuditModel):
|
||||
guid = models.CharField(max_length=64, null=True, blank=True)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(null=True, blank=True, default="")
|
||||
filename = models.CharField(max_length=255) # deprecated
|
||||
filename = models.CharField(max_length=255, null=True, blank=True)
|
||||
shell = models.CharField(
|
||||
max_length=100, choices=SCRIPT_SHELLS, default="powershell"
|
||||
)
|
||||
@@ -37,9 +38,12 @@ class Script(BaseAuditModel):
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
syntax = TextField(null=True, blank=True)
|
||||
favorite = models.BooleanField(default=False)
|
||||
category = models.CharField(max_length=100, null=True, blank=True)
|
||||
code_base64 = models.TextField(null=True, blank=True, default="")
|
||||
script_body = models.TextField(blank=True, default="")
|
||||
script_hash = models.CharField(max_length=100, null=True, blank=True)
|
||||
code_base64 = models.TextField(blank=True, default="") # deprecated
|
||||
default_timeout = models.PositiveIntegerField(default=90)
|
||||
|
||||
def __str__(self):
|
||||
@@ -47,12 +51,7 @@ class Script(BaseAuditModel):
|
||||
|
||||
@property
|
||||
def code_no_snippets(self):
|
||||
if self.code_base64:
|
||||
return base64.b64decode(self.code_base64.encode("ascii", "ignore")).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
return self.script_body if self.script_body else ""
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
@@ -77,6 +76,15 @@ class Script(BaseAuditModel):
|
||||
else:
|
||||
return code
|
||||
|
||||
def hash_script_body(self):
|
||||
from django.conf import settings
|
||||
|
||||
msg = self.code.encode()
|
||||
self.script_hash = hmac.new(
|
||||
settings.SECRET_KEY.encode(), msg, hashlib.sha256
|
||||
).hexdigest()
|
||||
self.save()
|
||||
|
||||
@classmethod
|
||||
def load_community_scripts(cls):
|
||||
import json
|
||||
@@ -99,6 +107,9 @@ class Script(BaseAuditModel):
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
|
||||
# used to remove scripts from DB that are removed from the json file and file system
|
||||
community_scripts_processed = [] # list of script guids
|
||||
|
||||
for script in info:
|
||||
if os.path.exists(os.path.join(scripts_dir, script["filename"])):
|
||||
s = cls.objects.filter(script_type="builtin", guid=script["guid"])
|
||||
@@ -115,83 +126,36 @@ class Script(BaseAuditModel):
|
||||
|
||||
args = script["args"] if "args" in script.keys() else []
|
||||
|
||||
syntax = script["syntax"] if "syntax" in script.keys() else ""
|
||||
|
||||
# if community script exists update it
|
||||
if s.exists():
|
||||
i = s.first()
|
||||
i.name = script["name"] # type: ignore
|
||||
i.description = script["description"] # type: ignore
|
||||
i.category = category # type: ignore
|
||||
i.shell = script["shell"] # type: ignore
|
||||
i.default_timeout = default_timeout # type: ignore
|
||||
i.args = args # type: ignore
|
||||
i: Script = s.get()
|
||||
i.name = script["name"]
|
||||
i.description = script["description"]
|
||||
i.category = category
|
||||
i.shell = script["shell"]
|
||||
i.default_timeout = default_timeout
|
||||
i.args = args
|
||||
i.syntax = syntax
|
||||
i.filename = script["filename"]
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
i.code_base64 = base64.b64encode(script_bytes).decode("ascii") # type: ignore
|
||||
i.script_body = f.read().decode("utf-8")
|
||||
# i.hash_script_body()
|
||||
i.save()
|
||||
|
||||
i.save( # type: ignore
|
||||
update_fields=[
|
||||
"name",
|
||||
"description",
|
||||
"category",
|
||||
"default_timeout",
|
||||
"code_base64",
|
||||
"shell",
|
||||
"args",
|
||||
]
|
||||
)
|
||||
|
||||
# check if script was added without a guid
|
||||
elif cls.objects.filter(
|
||||
script_type="builtin", name=script["name"]
|
||||
).exists():
|
||||
s = cls.objects.get(script_type="builtin", name=script["name"])
|
||||
|
||||
if not s.guid:
|
||||
print(f"Updating GUID for: {script['name']}")
|
||||
s.guid = script["guid"]
|
||||
s.name = script["name"]
|
||||
s.description = script["description"]
|
||||
s.category = category
|
||||
s.shell = script["shell"]
|
||||
s.default_timeout = default_timeout
|
||||
s.args = args
|
||||
|
||||
with open(
|
||||
os.path.join(scripts_dir, script["filename"]), "rb"
|
||||
) as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
s.code_base64 = base64.b64encode(script_bytes).decode(
|
||||
"ascii"
|
||||
)
|
||||
|
||||
s.save(
|
||||
update_fields=[
|
||||
"guid",
|
||||
"name",
|
||||
"description",
|
||||
"category",
|
||||
"default_timeout",
|
||||
"code_base64",
|
||||
"shell",
|
||||
"args",
|
||||
]
|
||||
)
|
||||
community_scripts_processed.append(i.guid)
|
||||
|
||||
# doesn't exist in database so create it
|
||||
else:
|
||||
print(f"Adding new community script: {script['name']}")
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
code_base64 = base64.b64encode(script_bytes).decode("ascii")
|
||||
script_body = f.read().decode("utf-8")
|
||||
|
||||
cls(
|
||||
code_base64=code_base64,
|
||||
new_script: Script = cls(
|
||||
script_body=script_body,
|
||||
guid=script["guid"],
|
||||
name=script["name"],
|
||||
description=script["description"],
|
||||
@@ -200,10 +164,24 @@ class Script(BaseAuditModel):
|
||||
category=category,
|
||||
default_timeout=default_timeout,
|
||||
args=args,
|
||||
).save()
|
||||
filename=script["filename"],
|
||||
syntax=syntax,
|
||||
)
|
||||
# new_script.hash_script_body() # also saves script
|
||||
new_script.save()
|
||||
|
||||
# delete community scripts that had their name changed
|
||||
cls.objects.filter(script_type="builtin", guid=None).delete()
|
||||
community_scripts_processed.append(new_script.guid)
|
||||
|
||||
# check for community scripts that were deleted from json and scripts folder
|
||||
count, _ = (
|
||||
Script.objects.filter(script_type="builtin")
|
||||
.exclude(guid__in=community_scripts_processed)
|
||||
.delete()
|
||||
)
|
||||
if count:
|
||||
print(
|
||||
f"Removing {count} community scripts that was removed from source repo"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def serialize(script):
|
||||
|
||||
@@ -16,10 +16,14 @@ class ScriptTableSerializer(ModelSerializer):
|
||||
"category",
|
||||
"favorite",
|
||||
"default_timeout",
|
||||
"syntax",
|
||||
"filename",
|
||||
]
|
||||
|
||||
|
||||
class ScriptSerializer(ModelSerializer):
|
||||
script_hash = ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Script
|
||||
fields = [
|
||||
@@ -30,17 +34,21 @@ class ScriptSerializer(ModelSerializer):
|
||||
"args",
|
||||
"category",
|
||||
"favorite",
|
||||
"code_base64",
|
||||
"script_body",
|
||||
"script_hash",
|
||||
"default_timeout",
|
||||
"syntax",
|
||||
"filename",
|
||||
]
|
||||
|
||||
|
||||
class ScriptCheckSerializer(ModelSerializer):
|
||||
code = ReadOnlyField()
|
||||
script_hash = ReadOnlyField
|
||||
|
||||
class Meta:
|
||||
model = Script
|
||||
fields = ["code", "shell"]
|
||||
fields = ["code", "shell", "script_hash"]
|
||||
|
||||
|
||||
class ScriptSnippetSerializer(ModelSerializer):
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
import json
|
||||
import os
|
||||
import hmac
|
||||
import hashlib
|
||||
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import override_settings
|
||||
from django.conf import settings
|
||||
from model_bakery import baker
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
@@ -31,6 +35,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@override_settings(SECRET_KEY="Test Secret Key")
|
||||
def test_add_script(self):
|
||||
url = f"/scripts/"
|
||||
|
||||
@@ -39,7 +44,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
"description": "Description",
|
||||
"shell": "powershell",
|
||||
"category": "New",
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"script_body": "Test Script",
|
||||
"default_timeout": 99,
|
||||
"args": ["hello", "world", r"{{agent.public_ip}}"],
|
||||
"favorite": False,
|
||||
@@ -48,11 +53,18 @@ class TestScriptViews(TacticalTestCase):
|
||||
# test without file upload
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.filter(name="Name").exists())
|
||||
self.assertEqual(Script.objects.get(name="Name").code, "Test")
|
||||
|
||||
new_script = Script.objects.filter(name="Name").get()
|
||||
self.assertTrue(new_script)
|
||||
|
||||
# correct_hash = hmac.new(
|
||||
# settings.SECRET_KEY.encode(), data["script_body"].encode(), hashlib.sha256
|
||||
# ).hexdigest()
|
||||
# self.assertEqual(new_script.script_hash, correct_hash)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@override_settings(SECRET_KEY="Test Secret Key")
|
||||
def test_modify_script(self):
|
||||
# test a call where script doesn't exist
|
||||
resp = self.client.put("/scripts/500/", format="json")
|
||||
@@ -66,7 +78,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
"name": script.name,
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"script_body": "Test Script Body", # Test
|
||||
"default_timeout": 13344556,
|
||||
}
|
||||
|
||||
@@ -75,14 +87,17 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
script = Script.objects.get(pk=script.pk)
|
||||
self.assertEquals(script.description, "Description Change")
|
||||
self.assertEquals(script.code, "Test")
|
||||
|
||||
# correct_hash = hmac.new(
|
||||
# settings.SECRET_KEY.encode(), data["script_body"].encode(), hashlib.sha256
|
||||
# ).hexdigest()
|
||||
# self.assertEqual(script.script_hash, correct_hash)
|
||||
|
||||
# test edit a builtin script
|
||||
|
||||
data = {
|
||||
"name": "New Name",
|
||||
"description": "New Desc",
|
||||
"code_base64": "VGVzdA==",
|
||||
"script_body": "aasdfdsf",
|
||||
} # Test
|
||||
builtin_script = baker.make_recipe("scripts.script", script_type="builtin")
|
||||
|
||||
@@ -94,7 +109,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"favorite": True,
|
||||
"code_base64": "VGVzdA==", # Test
|
||||
"script_body": "Test Script Body", # Test
|
||||
"default_timeout": 54345,
|
||||
}
|
||||
# test marking a builtin script as favorite
|
||||
@@ -166,29 +181,33 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
# test powershell file
|
||||
script = baker.make(
|
||||
"scripts.Script", code_base64="VGVzdA==", shell="powershell"
|
||||
"scripts.Script", script_body="Test Script Body", shell="powershell"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test"}) # type: ignore
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test Script Body"}) # type: ignore
|
||||
|
||||
# test batch file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
|
||||
script = baker.make(
|
||||
"scripts.Script", script_body="Test Script Body", shell="cmd"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test"}) # type: ignore
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test Script Body"}) # type: ignore
|
||||
|
||||
# test python file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
|
||||
script = baker.make(
|
||||
"scripts.Script", script_body="Test Script Body", shell="python"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test"}) # type: ignore
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test Script Body"}) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import base64
|
||||
import asyncio
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -37,6 +36,8 @@ class GetAddScripts(APIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
|
||||
# obj.hash_script_body()
|
||||
|
||||
return Response(f"{obj.name} was added!")
|
||||
|
||||
|
||||
@@ -64,6 +65,8 @@ class GetUpdateDeleteScript(APIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
|
||||
# obj.hash_script_body()
|
||||
|
||||
return Response(f"{obj.name} was edited!")
|
||||
|
||||
def delete(self, request, pk):
|
||||
|
||||
@@ -10,7 +10,7 @@ from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.utils import filter_software, notify_error
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import ChocoSoftware, InstalledSoftware
|
||||
from .permissions import SoftwarePerms
|
||||
@@ -76,13 +76,11 @@ class GetSoftware(APIView):
|
||||
if r == "timeout" or r == "natsdown":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
sw = filter_software(r)
|
||||
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
InstalledSoftware(agent=agent, software=r).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.first() # type: ignore
|
||||
s.software = sw
|
||||
s.software = r
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -20,8 +20,9 @@ app.accept_content = ["application/json"] # type: ignore
|
||||
app.result_serializer = "json" # type: ignore
|
||||
app.task_serializer = "json" # type: ignore
|
||||
app.conf.task_track_started = True
|
||||
app.autodiscover_tasks()
|
||||
app.conf.worker_proc_alive_timeout = 30
|
||||
app.conf.worker_max_tasks_per_child = 2
|
||||
app.autodiscover_tasks()
|
||||
|
||||
app.conf.beat_schedule = {
|
||||
"auto-approve-win-updates": {
|
||||
@@ -38,15 +39,7 @@ app.conf.beat_schedule = {
|
||||
},
|
||||
"handle-agents": {
|
||||
"task": "agents.tasks.handle_agents_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-agentinfo": {
|
||||
"task": "agents.tasks.agent_getinfo_task",
|
||||
"schedule": crontab(minute="*"),
|
||||
},
|
||||
"get-wmi": {
|
||||
"task": "agents.tasks.get_wmi_task",
|
||||
"schedule": crontab(minute=18, hour="*/5"),
|
||||
"schedule": crontab(minute="*/3"),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -59,11 +52,10 @@ def debug_task(self):
|
||||
@app.on_after_finalize.connect
|
||||
def setup_periodic_tasks(sender, **kwargs):
|
||||
|
||||
from agents.tasks import agent_outages_task, agent_checkin_task
|
||||
from agents.tasks import agent_outages_task
|
||||
from alerts.tasks import unsnooze_alerts
|
||||
from core.tasks import core_maintenance_tasks, cache_db_fields_task
|
||||
|
||||
sender.add_periodic_task(45.0, agent_checkin_task.s())
|
||||
sender.add_periodic_task(60.0, agent_outages_task.s())
|
||||
sender.add_periodic_task(60.0 * 30, core_maintenance_tasks.s())
|
||||
sender.add_periodic_task(60.0 * 60, unsnooze_alerts.s())
|
||||
|
||||
@@ -21,6 +21,7 @@ EXCLUDE_PATHS = (
|
||||
f"/{settings.ADMIN_URL}",
|
||||
"/logout",
|
||||
"/agents/installer",
|
||||
"/api/schema",
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -15,24 +15,24 @@ EXE_DIR = os.path.join(BASE_DIR, "tacticalrmm/private/exe")
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# latest release
|
||||
TRMM_VERSION = "0.9.2"
|
||||
TRMM_VERSION = "0.10.4"
|
||||
|
||||
# bump this version everytime vue code is changed
|
||||
# to alert user they need to manually refresh their browser
|
||||
APP_VER = "0.0.150"
|
||||
APP_VER = "0.0.154"
|
||||
|
||||
# https://github.com/wh1te909/rmmagent
|
||||
LATEST_AGENT_VER = "1.6.2"
|
||||
LATEST_AGENT_VER = "1.7.1"
|
||||
|
||||
MESH_VER = "0.9.45"
|
||||
MESH_VER = "0.9.55"
|
||||
|
||||
NATS_SERVER_VER = "2.3.3"
|
||||
|
||||
# for the update script, bump when need to recreate venv or npm install
|
||||
PIP_VER = "23"
|
||||
NPM_VER = "24"
|
||||
PIP_VER = "24"
|
||||
NPM_VER = "26"
|
||||
|
||||
SETUPTOOLS_VER = "58.5.3"
|
||||
SETUPTOOLS_VER = "59.4.0"
|
||||
WHEEL_VER = "0.37.0"
|
||||
|
||||
DL_64 = f"https://github.com/wh1te909/rmmagent/releases/download/v{LATEST_AGENT_VER}/winagent-v{LATEST_AGENT_VER}.exe"
|
||||
@@ -65,6 +65,13 @@ REST_FRAMEWORK = {
|
||||
"knox.auth.TokenAuthentication",
|
||||
"tacticalrmm.auth.APIAuthentication",
|
||||
),
|
||||
"DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
|
||||
}
|
||||
|
||||
SPECTACULAR_SETTINGS = {
|
||||
"TITLE": "Tactical RMM API",
|
||||
"DESCRIPTION": "Simple and Fast remote monitoring and management tool",
|
||||
"VERSION": TRMM_VERSION,
|
||||
}
|
||||
|
||||
if not "AZPIPELINE" in os.environ:
|
||||
@@ -97,6 +104,7 @@ INSTALLED_APPS = [
|
||||
"logs",
|
||||
"scripts",
|
||||
"alerts",
|
||||
"drf_spectacular",
|
||||
]
|
||||
|
||||
if not "AZPIPELINE" in os.environ:
|
||||
|
||||
@@ -1,19 +1,15 @@
|
||||
import json
|
||||
import os
|
||||
from unittest.mock import mock_open, patch
|
||||
|
||||
import requests
|
||||
from django.conf import settings
|
||||
from django.test import override_settings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .utils import (
|
||||
bitdays_to_string,
|
||||
filter_software,
|
||||
generate_winagent_exe,
|
||||
get_bit_days,
|
||||
reload_nats,
|
||||
run_nats_api_cmd,
|
||||
AGENT_DEFER,
|
||||
)
|
||||
|
||||
|
||||
@@ -78,12 +74,6 @@ class TestUtils(TacticalTestCase):
|
||||
|
||||
mock_subprocess.assert_called_once()
|
||||
|
||||
@patch("subprocess.run")
|
||||
def test_run_nats_api_cmd(self, mock_subprocess):
|
||||
ids = ["a", "b", "c"]
|
||||
_ = run_nats_api_cmd("wmi", ids)
|
||||
mock_subprocess.assert_called_once()
|
||||
|
||||
def test_bitdays_to_string(self):
|
||||
a = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]
|
||||
all_days = [
|
||||
@@ -104,11 +94,10 @@ class TestUtils(TacticalTestCase):
|
||||
r = bitdays_to_string(bit_weekdays)
|
||||
self.assertEqual(r, "Every day")
|
||||
|
||||
def test_filter_software(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/software1.json")
|
||||
) as f:
|
||||
sw = json.load(f)
|
||||
def test_defer_fields_exist(self):
|
||||
from agents.models import Agent
|
||||
|
||||
r = filter_software(sw)
|
||||
self.assertIsInstance(r, list)
|
||||
fields = [i.name for i in Agent._meta.get_fields()]
|
||||
|
||||
for i in AGENT_DEFER:
|
||||
self.assertIn(i, fields)
|
||||
|
||||
@@ -39,11 +39,23 @@ urlpatterns = [
|
||||
path("accounts/", include("accounts.urls")),
|
||||
]
|
||||
|
||||
if hasattr(settings, "ADMIN_ENABLED") and settings.ADMIN_ENABLED:
|
||||
if getattr(settings, "ADMIN_ENABLED", False):
|
||||
from django.contrib import admin
|
||||
|
||||
urlpatterns += (path(settings.ADMIN_URL, admin.site.urls),)
|
||||
|
||||
if getattr(settings, "SWAGGER_ENABLED", False):
|
||||
from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
|
||||
|
||||
urlpatterns += (
|
||||
path("api/schema/", SpectacularAPIView.as_view(), name="schema"),
|
||||
path(
|
||||
"api/schema/swagger-ui/",
|
||||
SpectacularSwaggerView.as_view(url_name="schema"),
|
||||
name="swagger-ui",
|
||||
),
|
||||
)
|
||||
|
||||
ws_urlpatterns = [
|
||||
path("ws/dashinfo/", DashInfo.as_asgi()), # type: ignore
|
||||
]
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import json
|
||||
import os
|
||||
import string
|
||||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
@@ -23,7 +22,7 @@ from agents.models import Agent
|
||||
|
||||
notify_error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
SoftwareList = list[dict[str, str]]
|
||||
AGENT_DEFER = ["wmi_detail", "services"]
|
||||
|
||||
WEEK_DAYS = {
|
||||
"Sunday": 0x1,
|
||||
@@ -147,26 +146,6 @@ def bitdays_to_string(day: int) -> str:
|
||||
return ", ".join(ret)
|
||||
|
||||
|
||||
def filter_software(sw: SoftwareList) -> SoftwareList:
|
||||
ret: SoftwareList = []
|
||||
printable = set(string.printable)
|
||||
for s in sw:
|
||||
ret.append(
|
||||
{
|
||||
"name": "".join(filter(lambda x: x in printable, s["name"])),
|
||||
"version": "".join(filter(lambda x: x in printable, s["version"])),
|
||||
"publisher": "".join(filter(lambda x: x in printable, s["publisher"])),
|
||||
"install_date": s["install_date"],
|
||||
"size": s["size"],
|
||||
"source": s["source"],
|
||||
"location": s["location"],
|
||||
"uninstall": s["uninstall"],
|
||||
}
|
||||
)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def reload_nats():
|
||||
users = [{"user": "tacticalrmm", "password": settings.SECRET_KEY}]
|
||||
agents = Agent.objects.prefetch_related("user").only(
|
||||
@@ -239,38 +218,6 @@ KnoxAuthMiddlewareStack = lambda inner: KnoxAuthMiddlewareInstance(
|
||||
)
|
||||
|
||||
|
||||
def run_nats_api_cmd(mode: str, ids: list[str] = [], timeout: int = 30) -> None:
|
||||
if mode == "wmi":
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"agents": ids,
|
||||
}
|
||||
else:
|
||||
db = settings.DATABASES["default"]
|
||||
config = {
|
||||
"key": settings.SECRET_KEY,
|
||||
"natsurl": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"user": db["USER"],
|
||||
"pass": db["PASSWORD"],
|
||||
"host": db["HOST"],
|
||||
"port": int(db["PORT"]),
|
||||
"dbname": db["NAME"],
|
||||
}
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
dir="/opt/tactical/tmp" if settings.DOCKER_BUILD else None
|
||||
) as fp:
|
||||
with open(fp.name, "w") as f:
|
||||
json.dump(config, f)
|
||||
|
||||
cmd = ["/usr/local/bin/nats-api", "-c", fp.name, "-m", mode]
|
||||
try:
|
||||
subprocess.run(cmd, timeout=timeout)
|
||||
except Exception as e:
|
||||
DebugLog.error(message=e)
|
||||
|
||||
|
||||
def get_latest_trmm_ver() -> str:
|
||||
url = "https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/api/tacticalrmm/tacticalrmm/settings.py"
|
||||
try:
|
||||
@@ -283,7 +230,7 @@ def get_latest_trmm_ver() -> str:
|
||||
if "TRMM_VERSION" in line:
|
||||
return line.split(" ")[2].strip('"')
|
||||
except Exception as e:
|
||||
DebugLog.error(message=e)
|
||||
DebugLog.error(message=str(e))
|
||||
|
||||
return "error"
|
||||
|
||||
@@ -352,7 +299,8 @@ def replace_db_values(
|
||||
if not obj:
|
||||
return ""
|
||||
|
||||
if hasattr(obj, temp[1]):
|
||||
# check if attr exists and isn't a function
|
||||
if hasattr(obj, temp[1]) and not callable(getattr(obj, temp[1])):
|
||||
value = f"'{getattr(obj, temp[1])}'" if quotes else getattr(obj, temp[1])
|
||||
|
||||
elif CustomField.objects.filter(model=model, name=temp[1]).exists():
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="15"
|
||||
SCRIPT_VERSION="16"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh'
|
||||
|
||||
GREEN='\033[0;32m'
|
||||
@@ -75,9 +75,9 @@ sudo tar -czvf ${tmp_dir}/confd/etc-confd.tar.gz -C /etc/conf.d .
|
||||
|
||||
sudo gzip -9 -c /var/lib/redis/appendonly.aof > ${tmp_dir}/redis/appendonly.aof.gz
|
||||
|
||||
sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/meshcentral.service ${sysd}/nats.service ${tmp_dir}/systemd/
|
||||
if [ -f "${sysd}/daphne.service" ]; then
|
||||
sudo cp ${sysd}/daphne.service ${tmp_dir}/systemd/
|
||||
sudo cp ${sysd}/rmm.service ${sysd}/celery.service ${sysd}/celerybeat.service ${sysd}/meshcentral.service ${sysd}/nats.service ${sysd}/daphne.service ${tmp_dir}/systemd/
|
||||
if [ -f "${sysd}/nats-api.service" ]; then
|
||||
sudo cp ${sysd}/nats-api.service ${tmp_dir}/systemd/
|
||||
fi
|
||||
|
||||
cat /rmm/api/tacticalrmm/tacticalrmm/private/log/django_debug.log | gzip -9 > ${tmp_dir}/rmm/debug.log.gz
|
||||
|
||||
@@ -7,6 +7,9 @@ RUN apk add --no-cache inotify-tools supervisor bash
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
COPY natsapi/bin/nats-api /usr/local/bin/
|
||||
RUN chmod +x /usr/local/bin/nats-api
|
||||
|
||||
COPY docker/containers/tactical-nats/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
|
||||
@@ -6,8 +6,10 @@ set -e
|
||||
|
||||
if [ "${DEV}" = 1 ]; then
|
||||
NATS_CONFIG=/workspace/api/tacticalrmm/nats-rmm.conf
|
||||
NATS_API_CONFIG=/workspace/api/tacticalrmm/nats-api.conf
|
||||
else
|
||||
NATS_CONFIG="${TACTICAL_DIR}/api/nats-rmm.conf"
|
||||
NATS_API_CONFIG="${TACTICAL_DIR}/api/nats-api.conf"
|
||||
fi
|
||||
|
||||
sleep 15
|
||||
@@ -37,6 +39,12 @@ stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
|
||||
[program:nats-api]
|
||||
command=/bin/bash -c "/usr/local/bin/nats-api -config ${NATS_API_CONFIG}"
|
||||
stdout_logfile=/dev/fd/1
|
||||
stdout_logfile_maxbytes=0
|
||||
redirect_stderr=true
|
||||
|
||||
EOF
|
||||
)"
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# creates python virtual env
|
||||
FROM python:3.9.6-slim AS CREATE_VENV_STAGE
|
||||
FROM python:3.9.9-slim AS CREATE_VENV_STAGE
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
@@ -23,7 +23,7 @@ RUN apt-get update && \
|
||||
|
||||
|
||||
# runtime image
|
||||
FROM python:3.9.6-slim
|
||||
FROM python:3.9.9-slim
|
||||
|
||||
# set env variables
|
||||
ENV VIRTUAL_ENV /opt/venv
|
||||
@@ -50,10 +50,6 @@ RUN apt-get update && \
|
||||
|
||||
SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"]
|
||||
|
||||
# copy nats-api file
|
||||
COPY natsapi/bin/nats-api /usr/local/bin/
|
||||
RUN chmod +x /usr/local/bin/nats-api
|
||||
|
||||
# docker init
|
||||
COPY docker/containers/tactical/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
@@ -129,6 +129,7 @@ EOF
|
||||
python manage.py load_chocos
|
||||
python manage.py load_community_scripts
|
||||
python manage.py reload_nats
|
||||
python manage.py create_natsapi_conf
|
||||
python manage.py create_installer_user
|
||||
|
||||
# create super user
|
||||
|
||||
@@ -8,17 +8,16 @@ networks:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.20.0.0/24
|
||||
api-db:
|
||||
redis:
|
||||
mesh-db:
|
||||
api-db: null
|
||||
redis: null
|
||||
mesh-db: null # docker managed persistent volumes
|
||||
|
||||
# docker managed persistent volumes
|
||||
volumes:
|
||||
tactical_data:
|
||||
postgres_data:
|
||||
mongo_data:
|
||||
mesh_data:
|
||||
redis_data:
|
||||
tactical_data: null
|
||||
postgres_data: null
|
||||
mongo_data: null
|
||||
mesh_data: null
|
||||
redis_data: null
|
||||
|
||||
services:
|
||||
# postgres database for api service
|
||||
@@ -41,7 +40,7 @@ services:
|
||||
image: redis:6.0-alpine
|
||||
command: redis-server --appendonly yes
|
||||
restart: always
|
||||
volumes:
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
networks:
|
||||
- redis
|
||||
@@ -51,7 +50,7 @@ services:
|
||||
container_name: trmm-init
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
restart: on-failure
|
||||
command: ["tactical-init"]
|
||||
command: [ "tactical-init" ]
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASS: ${POSTGRES_PASS}
|
||||
@@ -63,13 +62,13 @@ services:
|
||||
TRMM_PASS: ${TRMM_PASS}
|
||||
depends_on:
|
||||
- tactical-postgres
|
||||
- tactical-meshcentral
|
||||
- tactical-meshcentral
|
||||
networks:
|
||||
- api-db
|
||||
- proxy
|
||||
volumes:
|
||||
- tactical_data:/opt/tactical
|
||||
|
||||
|
||||
# nats
|
||||
tactical-nats:
|
||||
container_name: trmm-nats
|
||||
@@ -82,6 +81,7 @@ services:
|
||||
volumes:
|
||||
- tactical_data:/opt/tactical
|
||||
networks:
|
||||
api-db: null
|
||||
proxy:
|
||||
aliases:
|
||||
- ${API_HOST}
|
||||
@@ -91,7 +91,7 @@ services:
|
||||
container_name: trmm-meshcentral
|
||||
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
environment:
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
MESH_USER: ${MESH_USER}
|
||||
MESH_PASS: ${MESH_PASS}
|
||||
@@ -102,7 +102,7 @@ services:
|
||||
proxy:
|
||||
aliases:
|
||||
- ${MESH_HOST}
|
||||
mesh-db:
|
||||
mesh-db: null
|
||||
volumes:
|
||||
- tactical_data:/opt/tactical
|
||||
- mesh_data:/home/node/app/meshcentral-data
|
||||
@@ -137,7 +137,7 @@ services:
|
||||
tactical-backend:
|
||||
container_name: trmm-backend
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-backend"]
|
||||
command: [ "tactical-backend" ]
|
||||
restart: always
|
||||
networks:
|
||||
- proxy
|
||||
@@ -152,7 +152,7 @@ services:
|
||||
tactical-websockets:
|
||||
container_name: trmm-websockets
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-websockets"]
|
||||
command: [ "tactical-websockets" ]
|
||||
restart: always
|
||||
networks:
|
||||
- proxy
|
||||
@@ -188,7 +188,7 @@ services:
|
||||
tactical-celery:
|
||||
container_name: trmm-celery
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-celery"]
|
||||
command: [ "tactical-celery" ]
|
||||
restart: always
|
||||
networks:
|
||||
- redis
|
||||
@@ -204,7 +204,7 @@ services:
|
||||
tactical-celerybeat:
|
||||
container_name: trmm-celerybeat
|
||||
image: ${IMAGE_REPO}tactical:${VERSION}
|
||||
command: ["tactical-celerybeat"]
|
||||
command: [ "tactical-celerybeat" ]
|
||||
restart: always
|
||||
networks:
|
||||
- proxy
|
||||
|
||||
42
docs/docs/3rdparty_splashtop.md
Normal file
@@ -0,0 +1,42 @@
|
||||
# Splashtop
|
||||
|
||||
## Splashtop Integration
|
||||
|
||||
|
||||
From the UI go to **Settings > Global Settings > CUSTOM FIELDS > Agents**
|
||||
|
||||
Add Custom Field</br>
|
||||
**Target** = `Agent`</br>
|
||||
**Name** = `SplashtopSUUID`</br>
|
||||
**Field Type** = `Text`</br>
|
||||
|
||||

|
||||
|
||||
While in Global Settings go to **URL ACTIONS**
|
||||
|
||||
Add a URL Action</br>
|
||||
**Name** = `Splashtop`</br>
|
||||
**Description** = `Connect to a Splashtop client`</br>
|
||||
**URL Pattern** =
|
||||
|
||||
```html
|
||||
st-business://com.splashtop.business?account=&uuid={{agent.SplashtopSUUID}}&sessiontype=remote
|
||||
```
|
||||
|
||||
Navigate to an agent with Splashtop running (or apply using **Settings > Automation Manager**).</br>
|
||||
Go to Tasks.</br>
|
||||
Add Task</br>
|
||||
**Select Script** = `Splashtop - Get SUUID for client` (this is a builtin script from script library)</br>
|
||||
**Descriptive name of task** = `Obtain Splashtop SUUID from device registry.`</br>
|
||||
**Collector Task** = `CHECKED`</br>
|
||||
**Custom Field to update** = `SplashtopSUUID`</br>
|
||||
|
||||

|
||||
|
||||
Click **Next**</br>
|
||||
Check **Manual**</br>
|
||||
Click **Add Task**
|
||||
|
||||
Right click on the newly created task and click **Run Task Now**.
|
||||
|
||||
Give it a second to execute then right click the agent that you are working with and go to **Run URL Action > Splashtop**
|
||||
@@ -27,3 +27,9 @@ chmod +x backup.sh
|
||||
The backup tar file will be saved in `/rmmbackups` with the following format:
|
||||
|
||||
`rmm-backup-CURRENTDATETIME.tar`
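To sanity-check an archive without extracting it, listing its contents is usually enough (the filename below is a placeholder):

```bash
# list the files inside a backup archive
tar -tf /rmmbackups/rmm-backup-CURRENTDATETIME.tar | head
```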
|
||||
|
||||
## Video Walkthru
|
||||
|
||||
<div class="video-wrapper">
|
||||
<iframe width="320" height="180" src="https://www.youtube.com/embed/rC0NgYJUf_8" frameborder="0" allowfullscreen></iframe>
|
||||
</div>
|
||||
|
||||
@@ -87,7 +87,8 @@ npm install -g @quasar/cli
|
||||
quasar dev
|
||||
```
|
||||
|
||||
!!!info If you receive a CORS error when trying to log into your server via localhost or IP, try the following
|
||||
!!!info
|
||||
If you receive a CORS error when trying to log into your server via localhost or IP, try the following
|
||||
```bash
|
||||
rm -rf node_modules .quasar
|
||||
npm install
|
||||
|
||||
@@ -78,6 +78,12 @@ mkdocs is Exposed on Port: 8005
|
||||
|
||||
Open: [http://rmm.example.com:8005/](http://rmm.example.com:8005/)
|
||||
|
||||
!!!note
|
||||
If you add new mkdocs extensions you might need to:<br>
|
||||
- docker-compose down.<br>
|
||||
- Then delete the `/api/tacticalrmm/env/` folder.<br>
|
||||
- Then docker-compose up and it will download/rebuild new extensions
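Roughly, the steps from the note above as a command sequence (run from the directory containing the dev docker-compose files; the `env` path is assumed relative to the repo root):

```bash
docker-compose down
rm -rf api/tacticalrmm/env/
docker-compose up -d   # downloads/rebuilds the new mkdocs extensions on startup
```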
|
||||
|
||||
### View django administration
|
||||
|
||||
Open: [http://rmm.example.com:8000/admin/](http://rmm.example.com:8000/admin/)
|
||||
|
||||
18
docs/docs/functions/permissions.md
Normal file
@@ -0,0 +1,18 @@
|
||||
# User Roles and Permissions
|
||||
|
||||
## Permission Manager
|
||||
|
||||
Make sure you've set up at least 1 valid (Super User aka Administrator) role under _Settings > Permission Manager_
|
||||
|
||||
1. Log in as your usual Tactical user
|
||||
2. Go to Settings - Permissions Manager
|
||||
3. Click New Role
|
||||
4. You can call the role anything; I called it Admins
|
||||
5. Tick the Super User box (or the relevant permissions required)
|
||||
6. Click Save then exit Permissions Manager
|
||||
7. Go to Settings - Users
|
||||
8. Open the currently logged in user (or any other user) and assign the role created above in the Role drop down box.
|
||||
9. Click Save
|
||||
|
||||
Once you've set up a Super User role and assigned it to your primary user, you can create other roles with more limited access.
|
||||
|
||||
25
docs/docs/guide_gettingstarted.md
Normal file
@@ -0,0 +1,25 @@
|
||||
|
||||
# TL;DR Version
|
||||
|
||||
## At Install
|
||||
|
||||
- Setup Email Alerts
- Setup SMS Alerts
- Setup Server Preferences
    - General
        - Time Zone
        - Clear faults on agents that haven't checked in after (days)
- Setup Automation Manager
    - Default Profile for workstations
|
||||
|
||||
|
||||
## Every 75 days
|
||||
|
||||
- OS updates
- Reboot
- Backup
- TRMM Update
|
||||
|
||||
## Biannually
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
# How It All Works
|
||||
|
||||

|
||||
[](images/TacticalRMM-Network.png)
|
||||
|
||||
1. Agent installer steps
|
||||
Still need graphics for
|
||||
|
||||
2. Agent checks/tasks and how they work on the workstation/interact with server
|
||||
1. Agent installer steps
|
||||
|
||||
2. Agent checks/tasks and how they work on the workstation/interact with server
|
||||
|
||||
## Server
|
||||
|
||||
@@ -15,41 +17,307 @@ Has a postgres database located here:
|
||||
!!!description
|
||||
A web interface for the postgres database
|
||||
|
||||
### Services
|
||||
All Tactical RMM dependencies are listed [here](https://github.com/wh1te909/tacticalrmm/blob/develop/api/tacticalrmm/requirements.txt)
|
||||
|
||||
nginx
|
||||
### System Services
|
||||
|
||||
!!!description
|
||||
Web server that handles https traffic
|
||||
This lists the system services used by the server.
|
||||
|
||||
Log located at `/var/log/nginx`
|
||||
#### nginx web server
|
||||
|
||||
```bash
|
||||
tail /var/log/nginx
|
||||
```
|
||||
Nginx is the web server for the `rmm`, `api`, and `mesh` domains. All sites redirect port 80 (HTTP) to port 443 (HTTPS).
|
||||
|
||||
### Dependencies from [here](https://github.com/wh1te909/tacticalrmm/blob/develop/api/tacticalrmm/requirements.txt)
|
||||
!!! warning
|
||||
|
||||
[nats](https://nats.io/)
|
||||
nginx does not serve the NATS service on port 4222.
|
||||
|
||||
How communication between client and server bride NAT (Network Address Translation)
|
||||
???+ abstract "nginx configuration (a.k.a. sites available)"
|
||||
|
||||
[celery](https://github.com/celery/celery)
|
||||
- [nginx configuration docs](https://docs.nginx.com/nginx/admin-guide/basic-functionality/managing-configuration-files/)
|
||||
|
||||
!!!description
|
||||
Used to schedule tasks to be sent to Agent
|
||||
=== ":material-web: `rmm.example.com`"
|
||||
|
||||
This serves the frontend website that you intereact with.
|
||||
|
||||
- Config: `/etc/nginx/sites-enabled/frontend.conf`
|
||||
- root: `/var/www/rmm/dist`
|
||||
- Access log: `/var/log/nginx/frontend-access.log`
|
||||
- Error log: `/var/log/nginx/frontend-error.log`
|
||||
- TLS certificate: `/etc/letsencrypt/live/example.com/fullchain.pem`
|
||||
|
||||
=== ":material-web: `api.example.com`"
|
||||
|
||||
This serves the TRMM API for the frontend and agents.
|
||||
|
||||
- Config: `/etc/nginx/sites-enabled/rmm.conf`
|
||||
- roots:
|
||||
- `/rmm/api/tacticalrmm/static/`
|
||||
- `/rmm/api/tacticalrmm/tacticalrmm/private/`
|
||||
- Upstreams:
|
||||
- `unix://rmm/api/tacticalrmm/tacticalrmm.sock`
|
||||
- `unix://rmm/daphne.sock`
|
||||
- Access log: `/rmm/api/tacticalrmm/tacticalrmm/private/log/access.log`
|
||||
- Error log: `/rmm/api/tacticalrmm/tacticalrmm/private/log/error.log`
|
||||
- TLS certificate: `/etc/letsencrypt/live/example.com/fullchain.pem`
|
||||
|
||||
=== ":material-web: `mesh.example.com`"
|
||||
|
||||
This serves MeshCentral for remote access.
|
||||
|
||||
- Config: `/etc/nginx/sites-enabled/meshcentral.conf`
|
||||
- Upstream: `http://127.0.0.1:4430/`
|
||||
- Access log: `/var/log/nginx/access.log` (uses default)
|
||||
- Error log: `/var/log/nginx/error.log` (uses default)
|
||||
- TLS certificate: `/etc/letsencrypt/live/example.com/fullchain.pem`
|
||||
|
||||
=== ":material-web: default"
|
||||
|
||||
This is the default site installed with nginx. This listens on port 80 only.
|
||||
|
||||
- Config: `/etc/nginx/sites-enabled/default`
|
||||
- root: `/var/www/rmm/dist`
|
||||
- Access log: `/var/log/nginx/access.log` (uses default)
|
||||
- Error log: `/var/log/nginx/error.log` (uses default)
|
||||
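A quick way to confirm which of the sites listed above nginx has actually loaded is to dump the running config and filter for the vhost directives (a sketch; requires shell access to the server):

```bash
# show every listen/server_name nginx is serving
sudo nginx -T 2>/dev/null | grep -E 'server_name|listen'
```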
|
||||
???+ note "systemd config"
|
||||
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full nginx.service`
|
||||
- Stop: `systemctl stop nginx.service`
|
||||
- Start: `systemctl start nginx.service`
|
||||
- Restart: `systemctl restart nginx.service`
|
||||
- Reload: `systemctl reload nginx.service` reloads the config without restarting
|
||||
- Test config: `nginx -t`
|
||||
- Listening process: `ss -tulnp | grep nginx`
|
||||
|
||||
=== ":material-ubuntu: standard"
|
||||
|
||||
- Service: `nginx.service`
|
||||
- Address: `0.0.0.0`
|
||||
- Port: 443
|
||||
- Exec: `/usr/sbin/nginx -g 'daemon on; master_process on;'`
|
||||
- Version: 1.18.0
|
||||
|
||||
=== ":material-docker: docker"
|
||||
|
||||
TBD - To Be Documented
|
||||
|
||||
#### Tactical RMM (Django uWSGI) service
|
||||
|
||||
Built on the Django framework, the Tactical RMM service is the heart of the system, serving the API for the frontend and agents.
|
||||
|
||||
???+ note "systemd config"
|
||||
|
||||
- [uWSGI docs](https://uwsgi-docs.readthedocs.io/en/latest/index.html)
|
||||
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full rmm.service`
|
||||
- Stop: `systemctl stop rmm.service`
|
||||
- Start: `systemctl start rmm.service`
|
||||
- Restart: `systemctl restart rmm.service`
|
||||
- journalctl:
|
||||
- "tail" the logs: `journalctl --identifier uwsgi --follow`
|
||||
- View the logs: `journalctl --identifier uwsgi --since "30 minutes ago" | less`
|
||||
|
||||
=== ":material-ubuntu: standard"
|
||||
|
||||
- Service: `rmm.service`
|
||||
- Socket: `/rmm/api/tacticalrmm/tacticalrmm.sock`
|
||||
- uWSGI config: `/rmm/api/tacticalrmm/app.ini`
|
||||
- Log: None
|
||||
- Journal identifier: `uwsgi`
|
||||
- Version: 2.0.18
|
||||
|
||||
=== ":material-docker: docker"
|
||||
|
||||
TBD - To Be Documented
|
||||
|
||||
#### Daphne: Django channels daemon
|
||||
|
||||
[Daphne](https://github.com/django/daphne) is the official ASGI HTTP/WebSocket server maintained by the [Channels project](https://channels.readthedocs.io/en/stable/index.html).
|
||||
|
||||
???+ note "systemd config"
|
||||
|
||||
- Django [Channels configuration docs](https://channels.readthedocs.io/en/stable/topics/channel_layers.html)
|
||||
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full daphne.service`
|
||||
- Stop: `systemctl stop daphne.service`
|
||||
- Start: `systemctl start daphne.service`
|
||||
- Restart: `systemctl restart daphne.service`
|
||||
- journalctl (this provides only system start/stop logs, not the actual logs):
|
||||
- "tail" the logs: `journalctl --identifier daphne --follow`
|
||||
- View the logs: `journalctl --identifier daphne --since "30 minutes ago" | less`
|
||||
|
||||
=== ":material-ubuntu: standard"
|
||||
|
||||
- Service: `daphne.service`
|
||||
- Socket: `/rmm/daphne.sock`
|
||||
- Exec: `/rmm/api/env/bin/daphne -u /rmm/daphne.sock tacticalrmm.asgi:application`
|
||||
- Config: `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py`
|
||||
- Log: `/rmm/api/tacticalrmm/tacticalrmm/private/log/debug.log`
|
||||
|
||||
=== ":material-docker: docker"
|
||||
|
||||
TBD - To Be Documented
|
||||
|
||||
#### NATS server service
|
||||
|
||||
[NATS](https://nats.io/) is a messaging bus for "live" communication between the agent and server. NATS provides the framework for the server to push commands to the agent and receive information back.
|
||||
|
||||
???+ note "systemd config"
|
||||
|
||||
- [NATS server configuration docs](https://docs.nats.io/running-a-nats-service/configuration)
|
||||
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full nats.service`
|
||||
- Stop: `systemctl stop nats.service`
|
||||
- Start: `systemctl start nats.service`
|
||||
- Restart: `systemctl restart nats.service`
|
||||
- Reload: `systemctl reload nats.service` reloads the config without restarting
|
||||
- journalctl:
|
||||
- "tail" the logs: `journalctl --identifier nats-server --follow`
|
||||
- View the logs: `journalctl --identifier nats-server --since "30 minutes ago" | less`
|
||||
- Listening process: `ss -tulnp | grep nats-server`
|
||||
|
||||
=== ":material-ubuntu: standard"
|
||||
|
||||
- Service: `nats.service`
|
||||
- Address: `0.0.0.0`
|
||||
- Port: `4222`
|
||||
- Exec: `/usr/local/bin/nats-server --config /rmm/api/tacticalrmm/nats-rmm.conf`
|
||||
- Config: `/rmm/api/tacticalrmm/nats-rmm.conf`
|
||||
- TLS: `/etc/letsencrypt/live/example.com/fullchain.pem`
|
||||
- Log: None
|
||||
- Version: v2.3.3
|
||||
|
||||
=== ":material-docker: docker"
|
||||
|
||||
TBD - To Be Documented
|
||||
|
||||
#### NATS API service
|
||||
|
||||
The NATS API service appears to bridge the connection between the NATS server and the database, allowing the agent to save (i.e. push) information to the database.
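The config it reads is a small JSON file (path listed below); pretty-printing it is an easy way to see what the service connects to. Note it contains the database credentials, so don't share the output:

```bash
# pretty-print the nats-api config (contains secrets)
python3 -m json.tool /rmm/api/tacticalrmm/nats-api.conf
```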
|
||||
|
||||
???+ note "systemd config"
|
||||
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full nats-api.service`
|
||||
- Stop: `systemctl stop nats-api.service`
|
||||
- Start: `systemctl start nats-api.service`
|
||||
- Restart: `systemctl restart nats-api.service`
|
||||
- journalctl: This application does not appear to log anything.
|
||||
|
||||
=== ":material-ubuntu: standard"
|
||||
|
||||
- Service: `nats-api.service`
|
||||
- Exec: `/usr/local/bin/nats-api --config /rmm/api/tacticalrmm/nats-api.conf`
|
||||
- Config: `/rmm/api/tacticalrmm/nats-api.conf`
|
||||
- TLS: `/etc/letsencrypt/live/example.com/fullchain.pem`
|
||||
- Log: None
|
||||
|
||||
=== ":material-docker: docker"
|
||||
|
||||
TBD - To Be Documented
|
||||
|
||||
#### Celery service
|
||||
|
||||
[Celery](https://github.com/celery/celery) is a task queue focused on real-time processing and is responsible for scheduling tasks to be sent to agents.
|
||||
|
||||
Log located at `/var/log/celery`
|
||||
|
||||
```bash
|
||||
tail /var/log/celery
|
||||
```
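A quick way to confirm the workers are alive, assuming the standard install paths (virtualenv at `/rmm/api/env`):

```bash
cd /rmm/api/tacticalrmm
../env/bin/celery -A tacticalrmm status   # pings each worker node
```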
|
||||
???+ note "systemd config"
|
||||
|
||||
[Django](https://www.djangoproject.com/)
|
||||
- [Celery docs](https://docs.celeryproject.org/en/stable/index.html)
|
||||
- [Celery configuration docs](https://docs.celeryproject.org/en/stable/userguide/configuration.html)
|
||||
|
||||
!!!description
|
||||
Framework to integrate the server to interact with browser
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full celery.service`
|
||||
- Stop: `systemctl stop celery.service`
|
||||
- Start: `systemctl start celery.service`
|
||||
- Restart: `systemctl restart celery.service`
|
||||
- journalctl: Celery executes `sh` causing the systemd identifier to be `sh`, thus mixing the `celery` and `celerybeat` logs together.
|
||||
- "tail" the logs: `journalctl --identifier sh --follow`
|
||||
- View the logs: `journalctl --identifier sh --since "30 minutes ago" | less`
|
||||
- Tail logs: `tail -F /var/log/celery/w*-*.log`
|
||||
|
||||
=== ":material-ubuntu: standard"
|
||||
|
||||
- Service: `celery.service`
|
||||
- Exec: `/bin/sh -c '${CELERY_BIN} -A $CELERY_APP multi start $CELERYD_NODES --pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} --loglevel="${CELERYD_LOG_LEVEL}" $CELERYD_OPTS'`
|
||||
- Config: `/etc/conf.d/celery.conf`
|
||||
- Log: `/var/log/celery/w*-*.log`
|
||||
|
||||
=== ":material-docker: docker"
|
||||
|
||||
TBD - To Be Documented
|
||||
|
||||
#### Celery Beat service
|
||||
|
||||
[celery beat](https://github.com/celery/django-celery-beat) is a scheduler; it kicks off tasks at regular intervals, which are then executed by the available worker nodes in the cluster.
|
||||
|
||||
???+ note "systemd config"
|
||||
|
||||
- [Celery beat docs](https://docs.celeryproject.org/en/stable/userguide/periodic-tasks.html)
|
||||
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full celerybeat.service`
|
||||
- Stop: `systemctl stop celerybeat.service`
|
||||
- Start: `systemctl start celerybeat.service`
|
||||
- Restart: `systemctl restart celerybeat.service`
|
||||
- journalctl: Celery executes `sh` causing the systemd identifier to be `sh`, thus mixing the `celery` and `celerybeat` logs together.
|
||||
- "tail" the logs: `journalctl --identifier sh --follow`
|
||||
- View the logs: `journalctl --identifier sh --since "30 minutes ago" | less`
|
||||
- Tail logs: `tail -F /var/log/celery/beat.log`
|
||||
|
||||
=== ":material-ubuntu: standard"
|
||||
|
||||
- Service: `celerybeat.service`
|
||||
- Exec: `/bin/sh -c '${CELERY_BIN} -A ${CELERY_APP} beat --pidfile=${CELERYBEAT_PID_FILE} --logfile=${CELERYBEAT_LOG_FILE} --loglevel=${CELERYD_LOG_LEVEL}'`
|
||||
- Config: `/etc/conf.d/celery.conf`
|
||||
- Log: `/var/log/celery/beat.log`
|
||||
|
||||
=== ":material-docker: docker"
|
||||
|
||||
TBD - To Be Documented
|
||||
|
||||
#### MeshCentral
|
||||
|
||||
[MeshCentral](https://github.com/Ylianst/MeshCentral) is used for "Take Control" (connecting to a machine for remote access) and for 2 of the "Remote Background" screens (Terminal and File Browser).
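A minimal reachability check through nginx (replace `mesh.example.com` with your mesh domain):

```bash
# a 2xx/3xx status line means nginx and MeshCentral are up
curl -sI https://mesh.example.com | head -n 1
```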
|
||||
|
||||
???+ note "meshcentral"
|
||||
|
||||
- [MeshCentral docs](https://info.meshcentral.com/downloads/MeshCentral2/MeshCentral2UserGuide.pdf)
|
||||
|
||||
=== ":material-console-line: status commands"
|
||||
|
||||
- Status: `systemctl status --full meshcentral`
|
||||
- Stop: `systemctl stop meshcentral`
|
||||
- Start: `systemctl start meshcentral`
|
||||
- Restart: `systemctl restart meshcentral`
|
||||
|
||||
=== ":material-remote-desktop: Debugging"
|
||||
|
||||
- Open either "Take Control" or "Remote Background" to get mesh login token
|
||||
- Open https://mesh.example.com to open native mesh admin interface
|
||||
- Left-side "My Server" > Choose "Console" > type `agentstats`
|
||||
- To view detailed logging goto "Trace" > click Tracing button and choose categories
|
||||
|
||||
### Other Dependencies
|
||||
|
||||
[Django](https://www.djangoproject.com/) - Web framework that the server uses to interact with the browser.
|
||||
|
||||
<details>
|
||||
<summary>Django dependencies</summary>
|
||||
|
||||
```text
|
||||
future==0.18.2
|
||||
loguru==0.5.3
|
||||
msgpack==1.0.2
|
||||
@@ -60,33 +328,58 @@ pycryptodome==3.10.1
|
||||
pyotp==2.6.0
|
||||
pyparsing==2.4.7
|
||||
pytz==2021.1
|
||||
```
|
||||
</details>
|
||||
|
||||
[qrcode](https://pypi.org/project/qrcode/)
|
||||
[qrcode](https://pypi.org/project/qrcode/) - Creating QR codes for 2FA.
|
||||
|
||||
!!!description
|
||||
For creating QR codes for 2FA
|
||||
<details>
|
||||
<summary>qrcode dependencies</summary>
|
||||
|
||||
```text
|
||||
redis==3.5.3
|
||||
requests==2.25.1
|
||||
six==1.16.0
|
||||
sqlparse==0.4.1
|
||||
```
|
||||
</details>
|
||||
|
||||
[twilio](https://www.twilio.com/)
|
||||
[twilio](https://www.twilio.com/) - Python SMS notification integration.
|
||||
|
||||
!!!description
|
||||
Python SMS notification integration
|
||||
<details>
|
||||
<summary>twilio dependencies</summary>
|
||||
|
||||
```text
|
||||
urllib3==1.26.5
|
||||
uWSGI==2.0.19.1
|
||||
validators==0.18.2
|
||||
vine==5.0.0
|
||||
websockets==9.1
|
||||
zipp==3.4.1
|
||||
```
|
||||
</details>
|
||||
|
||||
|
||||
## Windows Agent
|
||||
|
||||
Found in `%programfiles%\TacticalAgent`
|
||||
|
||||
### Outbound Firewall Rules
|
||||
|
||||
If you have strict firewall rules these are the only outbound rules from the agent needed for all functionality:
|
||||
|
||||
1. All agents have to be able to connect outbound to the TRMM server on all 3 domain names, on ports 443 (agent and mesh) and 4222 (NATS, for checks/tasks/data)
|
||||
|
||||
2. The agent uses `https://icanhazip.tacticalrmm.io/` to get public IP info. If this site is down for whatever reason, the agent will fall back to `https://icanhazip.com` and then `https://ifconfig.co/ip`. A quick reachability check is sketched below.
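For example, from any shell on the agent's network (the hostname is a placeholder for your API domain; `timeout` and bash's `/dev/tcp` are assumed available):

```bash
# public IP endpoints used by the agent, in fallback order
curl -s https://icanhazip.tacticalrmm.io || curl -s https://icanhazip.com || curl -s https://ifconfig.co/ip
# NATS port check using bash's built-in /dev/tcp (no extra tools needed)
timeout 5 bash -c '</dev/tcp/api.example.com/4222' && echo "4222 reachable"
```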
|
||||
|
||||
#### Unsigned Agents
|
||||
|
||||
Unsigned agents require access to: `https://github.com/wh1te909/rmmagent/releases/*`
|
||||
|
||||
#### Signed Agents
|
||||
|
||||
Signed agents will require: `https://exe.tacticalrmm.io/` and `https://exe2.tacticalrmm.io/` for downloading/updating agents
|
||||
|
||||
### Services
|
||||
|
||||
3 services exist on all clients
|
||||
@@ -101,7 +394,7 @@ Found in `%programfiles%\TacticalAgent`
|
||||

|
||||

|
||||
|
||||
The [MeshCentral](https://meshcentral.com/) system which is accessible from <https://mesh.example.com> and is used
|
||||
The [MeshCentral](https://meshcentral.com/) system which is accessible from `https://mesh.example.com` and is used
|
||||
|
||||
* It runs 2 goroutines
|
||||
* one is the checkrunner which runs all the checks and then just sleeps until it's time to run more checks
|
||||
|
||||
BIN
docs/docs/images/3rdparty_splashtop1.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 13 KiB |
BIN
docs/docs/images/3rdparty_splashtop2.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 32 KiB |
BIN
docs/docs/images/mesh_agent_onlineoffline.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 25 KiB |
BIN
docs/docs/images/tipsntricks_filters.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 32 KiB |
BIN
docs/docs/images/tipsntricks_script_syntaxhelp.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 7.2 KiB |
17
docs/docs/install_considerations.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# Install Considerations
|
||||
|
||||
There are pluses and minuses to each install type. Be aware that:
|
||||
|
||||
- There is no migration script; once you've installed with one type there is no "conversion". You'll be installing a new server and migrating agents manually if you decide to go another way.
|
||||
|
||||
## Traditional Install
|
||||
|
||||
- It's a VM/machine: one storage device to back up if you want to do VM-based backups
|
||||
- You have a [backup](backup.md) and [restore](restore.md) script
|
||||
|
||||
## Docker Install
|
||||
|
||||
- Docker is more complicated in concept: it has volumes and images
|
||||
- If you're running multiple apps it uses fewer resources in the long run, because many containers/apps share one set of underlying OS base files
|
||||
- Backup/restore is via Docker methods only
|
||||
- Docker has container replication/mirroring options for redundancy/multiple servers
|
||||
@@ -9,7 +9,7 @@ Install docker
|
||||
We'll be using `example.com` as our domain for this example.
|
||||
|
||||
!!!info
|
||||
The RMM uses 3 different sites. The Vue frontend e.g. `rmm.example.com` which is where you'll be accesing your RMM from the browser, the REST backend e.g. `api.example.com` and Meshcentral e.g. `mesh.example.com`
|
||||
The RMM uses 3 different sites. The Vue frontend e.g. `rmm.example.com` which is where you'll be accessing your RMM from the browser, the REST backend e.g. `api.example.com` and Meshcentral e.g. `mesh.example.com`
|
||||
|
||||
1. Get the public IP of your server with `curl https://icanhazip.tacticalrmm.io`
|
||||
2. Open the DNS manager of wherever the domain you purchased is hosted.
|
||||
@@ -34,7 +34,7 @@ We're using the [DNS-01 challenge method](https://letsencrypt.org/docs/challenge
|
||||
#### a. Deploy the TXT record in your DNS manager
|
||||
|
||||
!!!warning
|
||||
TXT records can take anywhere from 1 minute to a few hours to propogate depending on your DNS provider.<br/>
|
||||
TXT records can take anywhere from 1 minute to a few hours to propagate depending on your DNS provider.<br/>
|
||||
You should verify the TXT record has been deployed first before pressing Enter.<br/>
|
||||
A quick way to check is with the following command:<br/> `dig -t txt _acme-challenge.example.com`<br/>
|
||||
or test using: <https://viewdns.info/dnsrecord/> Enter: `_acme-challenge.example.com`
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
|
||||
#### Hardware / OS
|
||||
|
||||
A fresh linux VM running either Ubuntu 20.04 LTS or Debian 10 with 3GB RAM
|
||||
A fresh linux VM running either Ubuntu 20.04 LTS or Debian 10/11 with 3GB RAM
|
||||
|
||||
!!!warning
|
||||
The provided install script assumes a fresh server with no software installed on it. Attempting to run it on an existing server with other services **will** break things and the install will fail.
|
||||
@@ -32,7 +32,7 @@ Install on a VPS: DigitalOcean, Linode, Vultr, BuyVM (highly recommended), Hetzn
|
||||
|
||||
Use something that meets [minimum specs](install_server.md#hardware-os)
|
||||
|
||||
### Run updates and setup the linux user
|
||||
### Run Updates on OS
|
||||
|
||||
SSH into the server as **root**.
|
||||
|
||||
@@ -46,6 +46,8 @@ apt -y upgrade
|
||||
|
||||
If a new kernel is installed, then reboot the server with the `reboot` command
|
||||
|
||||
### Create a linux user
|
||||
|
||||
Create a linux user named `tactical` to run the rmm and add it to the sudoers group.
|
||||
|
||||
**For Ubuntu**:
|
||||
@@ -63,7 +65,7 @@ usermod -a -G sudo tactical
|
||||
```
|
||||
|
||||
!!!tip
|
||||
[Enable passwordless sudo to make your life easier](https://linuxconfig.org/configure-sudo-without-password-on-ubuntu-20-04-focal-fossa-linux)
|
||||
[Enable passwordless sudo to make your life easier in the future](https://linuxconfig.org/configure-sudo-without-password-on-ubuntu-20-04-focal-fossa-linux)
|
||||
|
||||
### Setup the firewall (optional but highly recommended)
|
||||
|
||||
@@ -98,12 +100,15 @@ Enable and activate the firewall
|
||||
ufw enable && ufw reload
|
||||
```
|
||||
|
||||
!!!note
|
||||
You will never login to the server again as `root` again unless something has gone horribly wrong, and you're working with the developers.
|
||||
|
||||
### Create the A records
|
||||
|
||||
We'll be using `example.com` as our domain for this example.
|
||||
|
||||
!!!info
|
||||
The RMM uses 3 different sites. The Vue frontend e.g. `rmm.example.com` which is where you'll be accesing your RMM from the browser, the REST backend e.g. `api.example.com` and Meshcentral e.g. `mesh.example.com`
|
||||
The RMM uses 3 different sites. The Vue frontend e.g. `rmm.example.com` which is where you'll be accessing your RMM from the browser, the REST backend e.g. `api.example.com` and Meshcentral e.g. `mesh.example.com`
|
||||
|
||||
1. Get the public IP of your server with `curl https://icanhazip.tacticalrmm.io`
|
||||
2. Open the DNS manager of wherever the domain you purchased is hosted.
|
||||
@@ -134,7 +139,7 @@ Answer the initial questions when prompted. Replace `example.com` with your doma
|
||||
### Deploy the TXT record in your DNS manager for Lets Encrypt wildcard certs
|
||||
|
||||
!!!warning
|
||||
TXT records can take anywhere from 1 minute to a few hours to propogate depending on your DNS provider.<br/>
|
||||
TXT records can take anywhere from 1 minute to a few hours to propagate depending on your DNS provider.<br/>
|
||||
You should verify the TXT record has been deployed first before pressing Enter.<br/>
|
||||
A quick way to check is with the following command:<br/> `dig -t txt _acme-challenge.example.com`<br/>
|
||||
or test using: <https://viewdns.info/dnsrecord/> Enter: `_acme-challenge.example.com`
|
||||
@@ -181,7 +186,7 @@ If you have agents outside your local network: Make sure the public DNS servers
|
||||
|
||||
Login to your router/NAT device.
|
||||
|
||||
1. Set your TRMM server as a static IP (Use a DHCP reservation is usually safer)
|
||||
1. Set your TRMM server as a static IP (Using a DHCP reservation is usually safer)
|
||||
2. Create 2 port forwarding rules. `TCP Port 443` and `TCP Port 4222` to your TRMM servers private IP address.
|
||||
|
||||
!!!note
|
||||
|
||||
@@ -7,6 +7,32 @@ cd /rmm/api/tacticalrmm
|
||||
source ../env/bin/activate
|
||||
```
|
||||
|
||||
or docker version:
|
||||
|
||||
```bash
|
||||
docker exec -it trmm-backend /bin/bash
|
||||
/opt/venv/bin/python /opt/tactical/api/manage.py shell
|
||||
```
|
||||
|
||||
!!!tip
|
||||
For the Dev Docker version it would be `docker exec -it trmm-api-dev env/bin/python manage.py shell`
|
||||
|
||||
## Bulk Delete old agents by last checkin date or agent version
|
||||
|
||||
Test to see what will happen
|
||||
|
||||
```bash
|
||||
python manage.py bulk_delete_agents --days 60
|
||||
python manage.py bulk_delete_agents --agentver 1.5.0
|
||||
```
|
||||
|
||||
Do the delete
|
||||
|
||||
```bash
|
||||
python manage.py bulk_delete_agents --days 60 --delete
|
||||
python manage.py bulk_delete_agents --agentver 1.5.0 --delete
|
||||
```
|
||||
|
||||
## Reset a user's password
|
||||
|
||||
```bash
|
||||
@@ -25,6 +51,13 @@ python manage.py reset_2fa <username>
|
||||
python manage.py find_software "adobe"
|
||||
```
|
||||
|
||||
## Set specific Windows update to not install
|
||||
|
||||
```bash
|
||||
from winupdate.models import WinUpdate
|
||||
WinUpdate.objects.filter(kb="KB5007186").update(action="ignore", date_installed=None)
|
||||
```
|
||||
|
||||
## Show outdated online agents
|
||||
|
||||
```bash
|
||||
@@ -37,6 +70,14 @@ python manage.py show_outdated_agents
|
||||
python manage.py delete_tokens
|
||||
```
|
||||
|
||||
## Reset all Auth Tokens for Install agents and web sessions
|
||||
|
||||
```bash
|
||||
python manage.py shell
|
||||
from knox.models import AuthToken
|
||||
AuthToken.objects.all().delete()
|
||||
```
|
||||
|
||||
## Check for orphaned tasks on all agents and remove them
|
||||
|
||||
```bash
|
||||
|
||||
@@ -8,13 +8,79 @@
|
||||
|
||||
Make sure you update your old RMM to the latest version using the `update.sh` script and then run a fresh backup to use with this restore script.
|
||||
|
||||
## Prepare the new server
|
||||
## Install the new server
|
||||
|
||||
Create the same exact linux user account as you did when you installed the original server.
|
||||
### Run Updates on OS
|
||||
|
||||
Add it to the sudoers group and setup the firewall.
|
||||
SSH into the server as **root**.
|
||||
|
||||
Refer to the [installation instructions](install_server.md) for steps on how to do all of the above.
|
||||
Download and run the prereqs and latest updates
|
||||
|
||||
```bash
|
||||
apt update
|
||||
apt install -y wget curl sudo
|
||||
apt -y upgrade
|
||||
```
|
||||
|
||||
If a new kernel is installed, then reboot the server with the `reboot` command
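On Debian/Ubuntu you can usually tell whether a reboot is pending by checking for the flag file below (a sketch; on very minimal installs the file may not be created even when a new kernel was installed):

```bash
# Present only when installed updates require a reboot
[ -f /var/run/reboot-required ] && echo "Reboot required" || echo "No reboot required"
```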
|
||||
|
||||
### Create a linux user
|
||||
|
||||
Create a linux user named `tactical` to run the rmm and add it to the sudoers group.
|
||||
|
||||
**For Ubuntu**:
|
||||
|
||||
```bash
|
||||
adduser tactical
|
||||
usermod -a -G sudo tactical
|
||||
```
|
||||
|
||||
**For Debian**:
|
||||
|
||||
```bash
|
||||
useradd -m -s /bin/bash tactical
|
||||
usermod -a -G sudo tactical
|
||||
```
|
||||
|
||||
!!!tip
|
||||
[Enable passwordless sudo to make your life easier in the future](https://linuxconfig.org/configure-sudo-without-password-on-ubuntu-20-04-focal-fossa-linux)
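A minimal sketch of what the linked guide boils down to, assuming your user is named `tactical` (adjust if you picked a different name):

```bash
# Grant passwordless sudo via a drop-in sudoers file
echo "tactical ALL=(ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/tactical
sudo chmod 440 /etc/sudoers.d/tactical
```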
|
||||
|
||||
### Setup the firewall (optional but highly recommended)
|
||||
|
||||
!!!info
|
||||
Skip this step if your VM is __not__ publicly exposed to the world e.g. running behind NAT. You should set up the firewall rules in your router instead (ports 22, 443 and 4222 TCP).
|
||||
|
||||
```bash
|
||||
ufw default deny incoming
|
||||
ufw default allow outgoing
|
||||
ufw allow https
|
||||
ufw allow proto tcp from any to any port 4222
|
||||
```
|
||||
|
||||
!!!info
|
||||
SSH (port 22 tcp) is only required for you to remotely login and do basic linux server administration for your rmm. It is not needed for any agent communication.<br/>
|
||||
Allow ssh from everywhere (__not__ recommended)
|
||||
|
||||
```bash
|
||||
ufw allow ssh
|
||||
```
|
||||
|
||||
Allow ssh from only allowed IP's (__highly__ recommended)
|
||||
|
||||
```bash
|
||||
ufw allow proto tcp from X.X.X.X to any port 22
|
||||
ufw allow proto tcp from X.X.X.X to any port 22
|
||||
```
|
||||
|
||||
Enable and activate the firewall
|
||||
|
||||
```bash
|
||||
ufw enable && ufw reload
|
||||
```
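To confirm the firewall came up with the rules you expect (output formatting varies a little between ufw versions):

```bash
sudo ufw status verbose
```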
|
||||
|
||||
!!!note
|
||||
You will never log in to the server as `root` again unless something has gone horribly wrong and you're working with the developers.
|
||||
|
||||
|
||||
## Change DNS A records
|
||||
|
||||
@@ -24,16 +90,16 @@ Change the 3 A records `rmm`, `api` and `mesh` and point them to the public IP o
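Once the records have been updated, a quick way to confirm they resolve to the new server is to query them directly (replace `example.com` with your domain; each name should print the new public IP):

```bash
dig +short rmm.example.com api.example.com mesh.example.com
```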
|
||||
|
||||
## Run the restore script
|
||||
|
||||
Copy the backup tar file you created during [backup](backup.md) to the new server.
|
||||
1. Make sure you're logged in with the non-root user (eg `tactical`)
|
||||
|
||||
Download the restore script.
|
||||
2. Copy the backup tar file you created during [backup](backup.md) to the new server.
|
||||
|
||||
```bash
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
|
||||
chmod +x restore.sh
|
||||
```
|
||||
3. Download the restore script.
|
||||
|
||||
Call the restore script, passing it the backup file as the first argument:
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
|
||||
chmod +x restore.sh
|
||||
|
||||
4. Call the restore script, passing it the backup file as the first argument:
|
||||
|
||||
```bash
|
||||
./restore.sh rmm-backup-XXXXXXXXX.tar
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
# Tips and Tricks
|
||||
|
||||
## Server Monitoring
|
||||
|
||||
Monitor Network usage: <https://humdi.net/vnstat/>
|
||||
|
||||
Realtime Everything Usage: (_only run when needed because it uses a lot of resources_): <https://learn.netdata.cloud/docs/agent/packaging/installer/methods/kickstart>
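For vnstat, a minimal sketch of installing it from the distro repos and checking an interface's totals; the interface name `eth0` is an assumption, use `ip a` to find yours:

```bash
sudo apt install -y vnstat
vnstat -i eth0
```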
|
||||
|
||||
## Customize User Interface
|
||||
|
||||
At the top right of your web administration interface, click your Username > preferences. Set default tab: Servers|Workstations|Mixed
|
||||
@@ -8,6 +14,11 @@ At the top right of your web administration interface, click your Username > pre
|
||||
|
||||
*****
|
||||
|
||||
## Use the filters in the agent list
|
||||
|
||||

|
||||
|
||||
*****
|
||||
## MeshCentral
|
||||
|
||||
Tactical RMM is actually 2 products: An RMM service with agent, and a secondary [MeshCentral](https://github.com/Ylianst/MeshCentral) install that handles the `Take Control` and `Remote Background` stuff.
|
||||
@@ -25,7 +36,7 @@ Right-click the connect button in *Take Control* for connect options
|
||||
### Enable Remote Control options
|
||||
|
||||
!!!note
|
||||
These settings are independant of Tactical RMM. Enable features (like auto remove inactive devices) with caution
|
||||
These settings are independent of Tactical RMM. Enable features (like auto remove inactive devices) with caution
|
||||
|
||||
1. Remote background a machine then go to mesh.EXAMPLE.COM
|
||||
2. Click on My Account
|
||||
@@ -34,6 +45,20 @@ Right-click the connect button in *Take Control* for connect options
|
||||

|
||||
5. You can also change features by ticking whatever boxes you want in there (Features: Sync server device name to hostname, Automatically remove inactive devices, Notify/Prompt for Consent/Connection Toolbar settings)<br>
|
||||

|
||||
|
||||
6. Ok your way out
|
||||
|
||||
### Agent online/offline logs
|
||||
|
||||
In Mesh, from the agent's General tab
|
||||
|
||||

|
||||
## Scripts
|
||||
|
||||
### When Running Scripts
|
||||
|
||||
Use the (i) at the end of the script name to:
|
||||
|
||||
- Hover: see script parameter syntax help
|
||||
- Left Click: Opens the script source in Github
|
||||
|
||||

|
||||
|
||||
@@ -63,9 +63,44 @@ If you have agents that are relatively old, you will need to uninstall them manu
|
||||
|
||||
## Agents not checking in or showing up / General agent issues
|
||||
|
||||
These are usually NATS problems. Try the quick fix first:
|
||||
|
||||
### From the Admin Web Interface

First, reload NATS from Tactical's web UI:<br>
|
||||
*Tools > Server Maintenance > Reload Nats Configuration*
|
||||
|
||||
If that doesn't work, check each part starting with the server:
|
||||
|
||||
### Server SSH login
|
||||
|
||||
Reload NATS:
|
||||
|
||||
```bash
|
||||
/rmm/api/env/bin/python /rmm/api/tacticalrmm/manage.py reload_nats
|
||||
sudo systemctl restart nats
|
||||
```
|
||||
|
||||
Look at nats service errors (make sure it's running)
|
||||
|
||||
```bash
|
||||
sudo systemctl status nats
|
||||
```
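If the status output is truncated, the journal usually has the full error; a sketch (adjust the line count as needed):

```bash
# Last 50 log lines for the nats unit
sudo journalctl -u nats --no-pager -n 50
```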
|
||||
|
||||
If NATS isn't running, see the detailed reason why:
|
||||
|
||||
```bash
|
||||
sudo systemctl stop nats
|
||||
nats-server -DVV -c /rmm/api/tacticalrmm/nats-rmm.conf
|
||||
```
|
||||
|
||||
Fix the problem, then restart nats.
|
||||
```bash
|
||||
sudo systemctl restart nats
|
||||
```
|
||||
|
||||
### From Agent Install
|
||||
|
||||
Open CMD as admin on the problem computer and stop the agent services:
|
||||
|
||||
```cmd
|
||||
@@ -114,6 +149,7 @@ sudo systemctl status celery
|
||||
sudo systemctl status celerybeat
|
||||
sudo systemctl status nginx
|
||||
sudo systemctl status nats
|
||||
sudo systemctl status nats-api
|
||||
sudo systemctl status meshcentral
|
||||
sudo systemctl status mongod
|
||||
sudo systemctl status postgresql
|
||||
@@ -161,3 +197,11 @@ Are you trying to use a proxy to share your single public IP with multiple servi
|
||||
4. Click the add link
|
||||
5. Download both agents
|
||||
6. In Tactical RMM, go to **Settings > Global Settings > MeshCentral > Upload Mesh Agents** and upload them both into the appropriate places.
|
||||
|
||||
## Need to recover your mesh token?
|
||||
|
||||
Log in to the server with SSH and run:
|
||||
|
||||
```bash
|
||||
node /meshcentral/node_modules/meshcentral --logintokenkey
|
||||
```
|
||||
@@ -430,7 +430,7 @@ You need to add the certificate private key and public keys to the following fil
|
||||
|
||||
7. Restart services
|
||||
|
||||
sudo systemctl restart rmm celery celerybeat nginx nats natsapi
|
||||
sudo systemctl restart rmm celery celerybeat nginx nats nats-api
|
||||
|
||||
## Use certbot to do acme challenge over http
|
||||
|
||||
@@ -720,7 +720,7 @@ python manage.py reload_nats
|
||||
|
||||
### Restart services
|
||||
|
||||
for i in rmm celery celerybeat nginx nats natsapi
|
||||
for i in rmm celery celerybeat nginx nats nats-api
|
||||
do
|
||||
printf >&2 "${GREEN}Restarting ${i} service...${NC}\n"
|
||||
sudo systemctl restart ${i}
|
||||
@@ -869,3 +869,66 @@ Limit access to Tactical RMM's administration panel in nginx to specific locatio
|
||||
server_name rmm.example.com;
|
||||
return 404;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
## Apache Proxy
|
||||
How to run TRMM behind an Apache proxy.
|
||||
### TRMM SERVER
|
||||
Edit the file `/etc/nginx/sites-available/rmm.conf` and add the lines from the 'real_ip' module inside the `server` block:
|
||||
|
||||
|
||||
set_real_ip_from 192.168.0.200; #IP Address of your apache proxy
|
||||
real_ip_header X-Forwarded-For;
|
||||
|
||||
restart nginx
|
||||
|
||||
systemctl restart nginx
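It's worth validating the config before (or right after) restarting so a typo in the added lines doesn't take nginx down:

```bash
# Syntax-check the nginx configuration
sudo nginx -t
```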
|
||||
|
||||
### APACHE
|
||||
In your Apache SSL config: enable the SSL proxy and the RewriteEngine, set the proxy to preserve the host, add an upgrade rule for websockets, and set ProxyPass rules redirecting to the RMM location.

Example:
|
||||
|
||||
<VirtualHost *:443>
|
||||
ServerName rmm.blablabla.com.br:443
|
||||
ServerAlias mesh.blablabla.com.br:443 api.blablabla.com.br:443
|
||||
SSLEngine on
|
||||
|
||||
SSLCertificateFile "C:/Apache24/conf/ssl-rmm.blablabla.com.br/_.blablabla.com.br-chain.pem"
|
||||
SSLCertificateKeyFile "C:/Apache24/conf/ssl-rmm.blablabla.com.br/_.blablabla.com.br-key.pem"
|
||||
|
||||
SSLProxyEngine on
|
||||
|
||||
RewriteEngine On
|
||||
ProxyPreserveHost On
|
||||
|
||||
# When Upgrade:websocket header is present, redirect to ws
|
||||
# Using NC flag (case-insensitive) as some browsers will pass Websocket
|
||||
RewriteCond %{HTTP:Upgrade} =websocket [NC]
|
||||
RewriteRule ^/(.*) wss://192.168.0.212/$1 [P,L]
|
||||
|
||||
ProxyPass "/" "https://192.168.0..212/" retry=3
|
||||
ProxyPassReverse "/" "https://192.168.0.212/" retry=3
|
||||
|
||||
BrowserMatch "MSIE [2-5]" \
|
||||
nokeepalive ssl-unclean-shutdown \
|
||||
downgrade-1.0 force-response-1.0
|
||||
|
||||
</VirtualHost>
|
||||
|
||||
|
||||
### Updating the certificate

In my case, the DNS challenge is automated from Apache, so every time new cert files are issued they must also be copied onto the RMM server.
Just overwrite the default location:

/etc/letsencrypt/archive/blablablabla

or change the certs location in the nginx conf to whatever you want.
|
||||
|
||||
|
||||
|
||||
@@ -2,6 +2,9 @@
|
||||
|
||||
## Updating to the latest RMM version
|
||||
|
||||
!!!question
|
||||
You have a [backup](https://docs.docker.com/desktop/backup-and-restore/) right?
|
||||
|
||||
Tactical RMM updates the docker images on every release; they should be available within a few minutes
|
||||
|
||||
SSH into your server as a root user and run the below commands:
|
||||
@@ -23,7 +26,7 @@ To renew your Let's Encrypt wildcard cert, run the following command, replacing
|
||||
sudo certbot certonly --manual -d *.example.com --agree-tos --no-bootstrap --manual-public-ip-logging-ok --preferred-challenges dns -m admin@example.com --no-eff-email
|
||||
```
|
||||
|
||||
Verify the domain with the TXT record. Once issued, run the below commands to base64 encode the certificates and add then to the .env file
|
||||
Verify the domain with the TXT record. Once issued, run the below commands to base64 encode the certificates and add them to the .env file
|
||||
|
||||
```bash
|
||||
echo "CERT_PUB_KEY=$(sudo base64 -w 0 /etc/letsencrypt/live/${rootdomain}/fullchain.pem)" >> .env
|
||||
|
||||
@@ -19,13 +19,16 @@ Other than this, you should avoid making any changes to your server and let the
|
||||
|
||||
Sometimes, manual intervention will be required during an update in the form of yes/no prompts, so attempting to automate this will ignore these prompts and cause your installation to break.
|
||||
|
||||
SSH into your server as the linux user you created during install.
|
||||
SSH into your server as the linux user you created during install (eg `tactical`).
|
||||
|
||||
!!!danger
|
||||
__Never__ run any update scripts or commands as the `root` user.
|
||||
|
||||
This will mess up permissions and break your installation.
|
||||
|
||||
!!!question
|
||||
You have a [backup](backup.md) right?
|
||||
|
||||
Download the update script and run it:
|
||||
|
||||
```bash
|
||||
@@ -42,7 +45,7 @@ You can pass the optional `--force` flag to the update script to forcefully run
|
||||
./update.sh --force
|
||||
```
|
||||
|
||||
This is usefull for a botched update that might have not completed fully.
|
||||
This is useful for a botched update that might have not completed fully.
|
||||
|
||||
The update script will also fix any permissions that might have gotten messed up during a botched update, or if you accidentally ran the update script as the `root` user.
|
||||
|
||||
@@ -64,7 +67,7 @@ To renew your Let's Encrypt wildcard cert, run the following command, replacing
|
||||
sudo certbot certonly --manual -d *.example.com --agree-tos --no-bootstrap --manual-public-ip-logging-ok --preferred-challenges dns -m admin@example.com --no-eff-email
|
||||
```
|
||||
|
||||
Same instructions as during install for [verifying the TXT record](install_server.md#deploy-the-txt-record-in-your-dns-manager) has propogated before hitting Enter.
|
||||
Same instructions as during install for [verifying the TXT record](install_server.md#deploy-the-txt-record-in-your-dns-manager) has propagated before hitting Enter.
|
||||
|
||||
After this you have renewed the cert, simply run the `update.sh` script, passing it the `--force` flag.
|
||||
|
||||
|
||||
@@ -3,15 +3,18 @@ nav:
|
||||
- Home: index.md
|
||||
- Sponsor: sponsor.md
|
||||
- Code Signing: code_signing.md
|
||||
- RMM Installation:
|
||||
- RMM Server Installation:
|
||||
- "Install Considerations": install_considerations.md
|
||||
- "Traditional Install": install_server.md
|
||||
- "Docker Install": install_docker.md
|
||||
- Agent Installation: install_agent.md
|
||||
- Updating:
|
||||
- RMM Server Updating:
|
||||
- "Updating the RMM": update_server.md
|
||||
- "Updating the RMM (Docker)": update_docker.md
|
||||
- Agents:
|
||||
- "Agent Installation": install_agent.md
|
||||
- "Updating Agents": update_agents.md
|
||||
- Functionality:
|
||||
- "How it all Works": howitallworks.md
|
||||
- "Alerting": functions/alerting.md
|
||||
- "API Access": functions/api.md
|
||||
- "Automated Tasks": functions/automated_tasks.md
|
||||
@@ -20,6 +23,7 @@ nav:
|
||||
- "Django Admin": functions/django_admin.md
|
||||
- "Global Keystore": functions/keystore.md
|
||||
- "Maintenance Mode": functions/maintenance_mode.md
|
||||
- "Permissions": functions/permissions.md
|
||||
- "Remote Background": functions/remote_bg.md
|
||||
- "Settings Override": functions/settings_override.md
|
||||
- "Scripting": functions/scripting.md
|
||||
@@ -36,6 +40,7 @@ nav:
|
||||
- "Grafana": 3rdparty_grafana.md
|
||||
- "AnyDesk": 3rdparty_anydesk.md
|
||||
- "Connectwise Control / Screenconnect": 3rdparty_screenconnect.md
|
||||
- "Splashtop": 3rdparty_splashtop.md
|
||||
- "TeamViewer": 3rdparty_teamviewer.md
|
||||
- "BitDefender GravityZone": 3rdparty_bitdefender_gravityzone.md
|
||||
- Unsupported Extras:
|
||||
@@ -71,16 +76,25 @@ theme:
|
||||
palette:
|
||||
primary: "white"
|
||||
accent: "indigo"
|
||||
features:
|
||||
extra_css:
|
||||
- stylesheets/extra.css
|
||||
extra:
|
||||
social:
|
||||
- icon: fontawesome/brands/github
|
||||
link: "https://github.com/wh1te909/tacticalrmm"
|
||||
|
||||
markdown_extensions:
|
||||
- pymdownx.inlinehilite
|
||||
- admonition
|
||||
- pymdownx.details
|
||||
- codehilite:
|
||||
guess_lang: false
|
||||
- toc:
|
||||
permalink: true
|
||||
- pymdownx.emoji:
|
||||
emoji_index: !!python/name:materialx.emoji.twemoji
|
||||
emoji_generator: !!python/name:materialx.emoji.to_svg
|
||||
- pymdownx.superfences
|
||||
- pymdownx.tabbed:
|
||||
alternate_style: true
|
||||
|
||||
go.mod
@@ -1,13 +1,22 @@
|
||||
module github.com/wh1te909/tacticalrmm
|
||||
|
||||
go 1.16
|
||||
go 1.17
|
||||
|
||||
require (
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/jmoiron/sqlx v1.3.4
|
||||
github.com/lib/pq v1.10.2
|
||||
github.com/nats-io/nats-server/v2 v2.4.0 // indirect
|
||||
github.com/nats-io/nats.go v1.12.0
|
||||
github.com/nats-io/nats.go v1.12.3
|
||||
github.com/ugorji/go/codec v1.2.6
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211112185254-e9c45faf2b83
|
||||
google.golang.org/protobuf v1.27.1 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/nats-io/nkeys v0.3.0 // indirect
|
||||
github.com/nats-io/nuid v1.0.1 // indirect
|
||||
github.com/sirupsen/logrus v1.8.1 // indirect
|
||||
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e // indirect
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 // indirect
|
||||
)
|
||||
|
||||
go.sum
@@ -1,3 +1,4 @@
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
|
||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
|
||||
@@ -31,17 +32,34 @@ github.com/nats-io/jwt/v2 v2.0.3 h1:i/O6cmIsjpcQyWDYNcq2JyZ3/VTF8SJ4JWluI5OhpvI=
|
||||
github.com/nats-io/jwt/v2 v2.0.3/go.mod h1:VRP+deawSXyhNjXmxPCHskrR6Mq50BqpEI5SEcNiGlY=
|
||||
github.com/nats-io/nats-server/v2 v2.4.0 h1:auni7PHiuyXR4BnDPzLVs3iyO7W7XUmZs8J5cjVb2BE=
|
||||
github.com/nats-io/nats-server/v2 v2.4.0/go.mod h1:TUAhMFYh1VISyY/D4WKJUMuGHg8yHtoUTuxkbiej1lc=
|
||||
github.com/nats-io/nats.go v1.12.0 h1:n0oZzK2aIZDMKuEiMKJ9qkCUgVY5vTAAksSXtLlz5Xc=
|
||||
github.com/nats-io/nats.go v1.12.0/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w=
|
||||
github.com/nats-io/nats.go v1.12.3 h1:te0GLbRsjtejEkZKKiuk46tbfIn6FfCSv3WWSo1+51E=
|
||||
github.com/nats-io/nats.go v1.12.3/go.mod h1:BPko4oXsySz4aSWeFgOHLZs3G4Jq4ZAyE6/zMCxRT6w=
|
||||
github.com/nats-io/nkeys v0.2.0/go.mod h1:XdZpAbhgyyODYqjTawOnIOI7VlbKSarI9Gfy1tqEu/s=
|
||||
github.com/nats-io/nkeys v0.3.0 h1:cgM5tL53EvYRU+2YLXIK0G2mJtK12Ft9oeooSZMA2G8=
|
||||
github.com/nats-io/nkeys v0.3.0/go.mod h1:gvUNGjVcM2IPr5rCsRsC6Wb3Hr2CQAm08dsxtV6A5y4=
|
||||
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
|
||||
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
|
||||
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/ugorji/go v1.2.6 h1:tGiWC9HENWE2tqYycIqFTNorMmFRVhNwCpDOpWqnk8E=
|
||||
github.com/ugorji/go v1.2.6/go.mod h1:anCg0y61KIhDlPZmnH+so+RQbysYVyDko0IMgJv0Nn0=
|
||||
github.com/ugorji/go/codec v1.2.6 h1:7kbGefxLoDBuYXOms4yD7223OpNMMPNPZxXk5TvFcyQ=
|
||||
github.com/ugorji/go/codec v1.2.6/go.mod h1:V6TCNZ4PHqoHGFZuSG1W8nrCzzdgA2DozYxWFFpvxTw=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211001174053-e5699d36a79b h1:WLA6eHSBVuuUSrwDO9K4srMAGY/NEyBwxe0beFQyXEg=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211001174053-e5699d36a79b/go.mod h1:ILUz1utl5KgwrxmNHv0RpgMtKeh8gPAABvK2MiXBqv8=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111174321-133e360c1dc9 h1:2yQWajVLFbhoQT2HBq3HpVA1WwfkwXGxf805qR6MEx4=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111174321-133e360c1dc9/go.mod h1:ILUz1utl5KgwrxmNHv0RpgMtKeh8gPAABvK2MiXBqv8=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111183133-95fd87bc23ff h1:rmMbsIlEuAmPeBssEjcZCh5hRYtc6ajKuhvlCrSQj64=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111183133-95fd87bc23ff/go.mod h1:ILUz1utl5KgwrxmNHv0RpgMtKeh8gPAABvK2MiXBqv8=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111190958-39c3e2dfec67 h1:sez6UO2rKiCKYa4VTPKfmEyO0Qn6Bps2T//2Y3YkKbM=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111190958-39c3e2dfec67/go.mod h1:ILUz1utl5KgwrxmNHv0RpgMtKeh8gPAABvK2MiXBqv8=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111193154-6d7f8e4d0dcd h1:18S4tn72OOCWGbfkaMI7mo6luFWM7gi9vg5uofLfdTE=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211111193154-6d7f8e4d0dcd/go.mod h1:ILUz1utl5KgwrxmNHv0RpgMtKeh8gPAABvK2MiXBqv8=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211112185254-e9c45faf2b83 h1:faCwMxF0DwMppqThweKdmoxfruB/C/NjTYDG5d9O5V4=
|
||||
github.com/wh1te909/trmm-shared v0.0.0-20211112185254-e9c45faf2b83/go.mod h1:ILUz1utl5KgwrxmNHv0RpgMtKeh8gPAABvK2MiXBqv8=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
|
||||
@@ -52,6 +70,7 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
|
||||
golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 h1:SrN+KX8Art/Sf4HNj6Zcz06G7VEz+7w9tdXTPOZ7+l4=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
|
||||
install.sh
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="55"
|
||||
SCRIPT_VERSION="57"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh'
|
||||
|
||||
sudo apt install -y curl wget dirmngr gnupg lsb-release
|
||||
@@ -40,11 +40,11 @@ fi
|
||||
|
||||
|
||||
# determine system
|
||||
if ([ "$osname" = "ubuntu" ] && [ "$fullrelno" = "20.04" ]) || ([ "$osname" = "debian" ] && [ $relno -eq 10 ]); then
|
||||
if ([ "$osname" = "ubuntu" ] && [ "$fullrelno" = "20.04" ]) || ([ "$osname" = "debian" ] && [ $relno -ge 10 ]); then
|
||||
echo $fullrel
|
||||
else
|
||||
echo $fullrel
|
||||
echo -ne "${RED}Only Ubuntu release 20.04 and Debian 10 are supported\n"
|
||||
echo -ne "${RED}Supported versions: Ubuntu 20.04, Debian 10 and 11\n"
|
||||
echo -ne "Your system does not appear to be supported${NC}\n"
|
||||
exit 1
|
||||
fi
|
||||
@@ -64,9 +64,11 @@ fi
|
||||
|
||||
if ([ "$osname" = "ubuntu" ]); then
|
||||
mongodb_repo="deb [arch=amd64] https://repo.mongodb.org/apt/$osname $codename/mongodb-org/4.4 multiverse"
|
||||
# there is no bullseye repo yet for mongo so just use buster on debian 11
|
||||
elif ([ "$osname" = "debian" ] && [ $relno -eq 11 ]); then
|
||||
mongodb_repo="deb [arch=amd64] https://repo.mongodb.org/apt/$osname buster/mongodb-org/4.4 main"
|
||||
else
|
||||
mongodb_repo="deb [arch=amd64] https://repo.mongodb.org/apt/$osname $codename/mongodb-org/4.4 main"
|
||||
|
||||
fi
|
||||
|
||||
postgresql_repo="deb [arch=amd64] https://apt.postgresql.org/pub/repos/apt/ $codename-pgdg main"
|
||||
@@ -193,14 +195,14 @@ print_green 'Installing Python 3.9'
|
||||
sudo apt install -y build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libreadline-dev libffi-dev libsqlite3-dev libbz2-dev
|
||||
numprocs=$(nproc)
|
||||
cd ~
|
||||
wget https://www.python.org/ftp/python/3.9.6/Python-3.9.6.tgz
|
||||
tar -xf Python-3.9.6.tgz
|
||||
cd Python-3.9.6
|
||||
wget https://www.python.org/ftp/python/3.9.9/Python-3.9.9.tgz
|
||||
tar -xf Python-3.9.9.tgz
|
||||
cd Python-3.9.9
|
||||
./configure --enable-optimizations
|
||||
make -j $numprocs
|
||||
sudo make altinstall
|
||||
cd ~
|
||||
sudo rm -rf Python-3.9.6 Python-3.9.6.tgz
|
||||
sudo rm -rf Python-3.9.9 Python-3.9.9.tgz
|
||||
|
||||
|
||||
print_green 'Installing redis and git'
|
||||
@@ -351,6 +353,7 @@ pip install --no-cache-dir setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
|
||||
pip install --no-cache-dir -r /rmm/api/tacticalrmm/requirements.txt
|
||||
python manage.py migrate
|
||||
python manage.py collectstatic --no-input
|
||||
python manage.py create_natsapi_conf
|
||||
python manage.py load_chocos
|
||||
python manage.py load_community_scripts
|
||||
printf >&2 "${YELLOW}%0.s*${NC}" {1..80}
|
||||
@@ -439,7 +442,7 @@ echo "${daphneservice}" | sudo tee /etc/systemd/system/daphne.service > /dev/nul
|
||||
natsservice="$(cat << EOF
|
||||
[Unit]
|
||||
Description=NATS Server
|
||||
After=network.target ntp.service
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
PrivateTmp=true
|
||||
@@ -451,6 +454,7 @@ User=${USER}
|
||||
Group=www-data
|
||||
Restart=always
|
||||
RestartSec=5s
|
||||
LimitNOFILE=1000000
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -458,6 +462,25 @@ EOF
|
||||
)"
|
||||
echo "${natsservice}" | sudo tee /etc/systemd/system/nats.service > /dev/null
|
||||
|
||||
natsapi="$(cat << EOF
|
||||
[Unit]
|
||||
Description=TacticalRMM Nats Api v1
|
||||
After=nats.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/local/bin/nats-api
|
||||
User=${USER}
|
||||
Group=${USER}
|
||||
Restart=always
|
||||
RestartSec=5s
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
)"
|
||||
echo "${natsapi}" | sudo tee /etc/systemd/system/nats-api.service > /dev/null
|
||||
|
||||
nginxrmm="$(cat << EOF
|
||||
server_tokens off;
|
||||
|
||||
@@ -791,6 +814,10 @@ python manage.py reload_nats
|
||||
deactivate
|
||||
sudo systemctl start nats.service
|
||||
|
||||
sleep 1
|
||||
sudo systemctl enable nats-api.service
|
||||
sudo systemctl start nats-api.service
|
||||
|
||||
## disable django admin
|
||||
sed -i 's/ADMIN_ENABLED = True/ADMIN_ENABLED = False/g' /rmm/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||
|
||||
|
||||
main.go
@@ -6,15 +6,19 @@ import (
|
||||
"flag"
|
||||
"fmt"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
"github.com/wh1te909/tacticalrmm/natsapi"
|
||||
)
|
||||
|
||||
var version = "2.3.0"
|
||||
var (
|
||||
version = "3.0.0"
|
||||
log = logrus.New()
|
||||
)
|
||||
|
||||
func main() {
|
||||
ver := flag.Bool("version", false, "Prints version")
|
||||
mode := flag.String("m", "", "Mode")
|
||||
config := flag.String("c", "", "config file")
|
||||
cfg := flag.String("config", "", "Path to config file")
|
||||
logLevel := flag.String("log", "INFO", "The log level")
|
||||
flag.Parse()
|
||||
|
||||
if *ver {
|
||||
@@ -22,14 +26,15 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
switch *mode {
|
||||
case "wmi":
|
||||
api.GetWMI(*config)
|
||||
case "checkin":
|
||||
api.CheckIn(*config)
|
||||
case "agentinfo":
|
||||
api.AgentInfo(*config)
|
||||
default:
|
||||
fmt.Println(version)
|
||||
}
|
||||
setupLogging(logLevel)
|
||||
|
||||
api.Svc(log, *cfg)
|
||||
}
|
||||
|
||||
func setupLogging(level *string) {
|
||||
ll, err := logrus.ParseLevel(*level)
|
||||
if err != nil {
|
||||
ll = logrus.InfoLevel
|
||||
}
|
||||
log.SetLevel(ll)
|
||||
}
|
||||
|
||||
Binary file not shown.
natsapi/svc.go
@@ -0,0 +1,166 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
_ "github.com/lib/pq"
|
||||
nats "github.com/nats-io/nats.go"
|
||||
"github.com/sirupsen/logrus"
|
||||
"github.com/ugorji/go/codec"
|
||||
trmm "github.com/wh1te909/trmm-shared"
|
||||
)
|
||||
|
||||
func Svc(logger *logrus.Logger, cfg string) {
|
||||
logger.Debugln("Starting Svc()")
|
||||
db, r, err := GetConfig(cfg)
|
||||
if err != nil {
|
||||
logger.Fatalln(err)
|
||||
}
|
||||
|
||||
opts := setupNatsOptions(r.Key)
|
||||
nc, err := nats.Connect(r.NatsURL, opts...)
|
||||
if err != nil {
|
||||
logger.Fatalln(err)
|
||||
}
|
||||
|
||||
nc.Subscribe("*", func(msg *nats.Msg) {
|
||||
var mh codec.MsgpackHandle
|
||||
mh.MapType = reflect.TypeOf(map[string]interface{}(nil))
|
||||
mh.RawToString = true
|
||||
dec := codec.NewDecoderBytes(msg.Data, &mh)
|
||||
|
||||
switch msg.Reply {
|
||||
case "agent-hello":
|
||||
go func() {
|
||||
var p trmm.CheckInNats
|
||||
if err := dec.Decode(&p); err == nil {
|
||||
loc, _ := time.LoadLocation("UTC")
|
||||
now := time.Now().In(loc)
|
||||
logger.Debugln("Hello", p, now)
|
||||
stmt := `
|
||||
UPDATE agents_agent
|
||||
SET last_seen=$1, version=$2
|
||||
WHERE agents_agent.agent_id=$3;
|
||||
`
|
||||
|
||||
_, err = db.Exec(stmt, now, p.Version, p.Agentid)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
case "agent-publicip":
|
||||
go func() {
|
||||
var p trmm.PublicIPNats
|
||||
if err := dec.Decode(&p); err == nil {
|
||||
logger.Debugln("Public IP", p)
|
||||
stmt := `
|
||||
UPDATE agents_agent SET public_ip=$1 WHERE agents_agent.agent_id=$2;`
|
||||
_, err = db.Exec(stmt, p.PublicIP, p.Agentid)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
case "agent-agentinfo":
|
||||
go func() {
|
||||
var r trmm.AgentInfoNats
|
||||
if err := dec.Decode(&r); err == nil {
|
||||
stmt := `
|
||||
UPDATE agents_agent
|
||||
SET hostname=$1, operating_system=$2,
|
||||
plat=$3, total_ram=$4, boot_time=$5, needs_reboot=$6, logged_in_username=$7
|
||||
WHERE agents_agent.agent_id=$8;`
|
||||
|
||||
logger.Debugln("Info", r)
|
||||
_, err = db.Exec(stmt, r.Hostname, r.OS, r.Platform, r.TotalRAM, r.BootTime, r.RebootNeeded, r.Username, r.Agentid)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
|
||||
if r.Username != "None" {
|
||||
stmt = `UPDATE agents_agent SET last_logged_in_user=$1 WHERE agents_agent.agent_id=$2;`
|
||||
logger.Debugln("Updating last logged in user:", r.Username)
|
||||
_, err = db.Exec(stmt, r.Username, r.Agentid)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
case "agent-disks":
|
||||
go func() {
|
||||
var r trmm.WinDisksNats
|
||||
if err := dec.Decode(&r); err == nil {
|
||||
logger.Debugln("Disks", r)
|
||||
b, err := json.Marshal(r.Disks)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
return
|
||||
}
|
||||
stmt := `
|
||||
UPDATE agents_agent SET disks=$1 WHERE agents_agent.agent_id=$2;`
|
||||
|
||||
_, err = db.Exec(stmt, b, r.Agentid)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
case "agent-winsvc":
|
||||
go func() {
|
||||
var r trmm.WinSvcNats
|
||||
if err := dec.Decode(&r); err == nil {
|
||||
logger.Debugln("WinSvc", r)
|
||||
b, err := json.Marshal(r.WinSvcs)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
return
|
||||
}
|
||||
|
||||
stmt := `
|
||||
UPDATE agents_agent SET services=$1 WHERE agents_agent.agent_id=$2;`
|
||||
|
||||
_, err = db.Exec(stmt, b, r.Agentid)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
case "agent-wmi":
|
||||
go func() {
|
||||
var r trmm.WinWMINats
|
||||
if err := dec.Decode(&r); err == nil {
|
||||
logger.Debugln("WMI", r)
|
||||
b, err := json.Marshal(r.WMI)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
return
|
||||
}
|
||||
stmt := `
|
||||
UPDATE agents_agent SET wmi_detail=$1 WHERE agents_agent.agent_id=$2;`
|
||||
|
||||
_, err = db.Exec(stmt, b, r.Agentid)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
})
|
||||
|
||||
nc.Flush()
|
||||
|
||||
if err := nc.LastError(); err != nil {
|
||||
logger.Fatalln(err)
|
||||
}
|
||||
runtime.Goexit()
|
||||
}
|
||||
natsapi/tasks.go
@@ -1,257 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"math/rand"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
_ "github.com/lib/pq"
|
||||
nats "github.com/nats-io/nats.go"
|
||||
"github.com/ugorji/go/codec"
|
||||
)
|
||||
|
||||
type JsonFile struct {
|
||||
Agents []string `json:"agents"`
|
||||
Key string `json:"key"`
|
||||
NatsURL string `json:"natsurl"`
|
||||
}
|
||||
|
||||
type DjangoConfig struct {
|
||||
Key string `json:"key"`
|
||||
NatsURL string `json:"natsurl"`
|
||||
User string `json:"user"`
|
||||
Pass string `json:"pass"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
DBName string `json:"dbname"`
|
||||
}
|
||||
|
||||
type Agent struct {
|
||||
ID int `db:"id"`
|
||||
AgentID string `db:"agent_id"`
|
||||
}
|
||||
|
||||
type Recovery struct {
|
||||
Func string `json:"func"`
|
||||
Data map[string]string `json:"payload"`
|
||||
}
|
||||
|
||||
func setupNatsOptions(key string) []nats.Option {
|
||||
opts := []nats.Option{
|
||||
nats.Name("TacticalRMM"),
|
||||
nats.UserInfo("tacticalrmm", key),
|
||||
nats.ReconnectWait(time.Second * 2),
|
||||
nats.RetryOnFailedConnect(true),
|
||||
nats.MaxReconnects(3),
|
||||
nats.ReconnectBufSize(-1),
|
||||
}
|
||||
return opts
|
||||
}
|
||||
|
||||
func CheckIn(file string) {
|
||||
agents, db, r, err := GetAgents(file)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
|
||||
var payload []byte
|
||||
ret := codec.NewEncoderBytes(&payload, new(codec.MsgpackHandle))
|
||||
ret.Encode(map[string]string{"func": "ping"})
|
||||
|
||||
opts := setupNatsOptions(r.Key)
|
||||
|
||||
nc, err := nats.Connect(r.NatsURL, opts...)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
defer nc.Close()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(len(agents))
|
||||
|
||||
loc, _ := time.LoadLocation("UTC")
|
||||
now := time.Now().In(loc)
|
||||
|
||||
for _, a := range agents {
|
||||
go func(id string, pk int, nc *nats.Conn, wg *sync.WaitGroup, db *sqlx.DB, now time.Time) {
|
||||
defer wg.Done()
|
||||
|
||||
var resp string
|
||||
var mh codec.MsgpackHandle
|
||||
mh.RawToString = true
|
||||
|
||||
time.Sleep(time.Duration(randRange(100, 1500)) * time.Millisecond)
|
||||
out, err := nc.Request(id, payload, 1*time.Second)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
dec := codec.NewDecoderBytes(out.Data, &mh)
|
||||
if err := dec.Decode(&resp); err == nil {
|
||||
if resp == "pong" {
|
||||
_, err = db.NamedExec(
|
||||
`UPDATE agents_agent SET last_seen=:lastSeen WHERE agents_agent.id=:pk`,
|
||||
map[string]interface{}{"lastSeen": now, "pk": pk},
|
||||
)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}(a.AgentID, a.ID, nc, &wg, db, now)
|
||||
}
|
||||
wg.Wait()
|
||||
db.Close()
|
||||
}
|
||||
|
||||
func GetAgents(file string) (agents []Agent, db *sqlx.DB, r DjangoConfig, err error) {
|
||||
jret, _ := ioutil.ReadFile(file)
|
||||
err = json.Unmarshal(jret, &r)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
psqlInfo := fmt.Sprintf("host=%s port=%d user=%s "+
|
||||
"password=%s dbname=%s sslmode=disable",
|
||||
r.Host, r.Port, r.User, r.Pass, r.DBName)
|
||||
|
||||
db, err = sqlx.Connect("postgres", psqlInfo)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
db.SetMaxOpenConns(15)
|
||||
|
||||
agent := Agent{}
|
||||
rows, err := db.Queryx("SELECT agents_agent.id, agents_agent.agent_id FROM agents_agent")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
err := rows.StructScan(&agent)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
agents = append(agents, agent)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func AgentInfo(file string) {
|
||||
agents, db, r, err := GetAgents(file)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
|
||||
var payload []byte
|
||||
ret := codec.NewEncoderBytes(&payload, new(codec.MsgpackHandle))
|
||||
ret.Encode(map[string]string{"func": "agentinfo"})
|
||||
|
||||
opts := setupNatsOptions(r.Key)
|
||||
|
||||
nc, err := nats.Connect(r.NatsURL, opts...)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
defer nc.Close()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(len(agents))
|
||||
|
||||
for _, a := range agents {
|
||||
go func(id string, pk int, nc *nats.Conn, wg *sync.WaitGroup, db *sqlx.DB) {
|
||||
defer wg.Done()
|
||||
|
||||
var r AgentInfoRet
|
||||
var mh codec.MsgpackHandle
|
||||
mh.RawToString = true
|
||||
|
||||
time.Sleep(time.Duration(randRange(100, 1500)) * time.Millisecond)
|
||||
out, err := nc.Request(id, payload, 1*time.Second)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
dec := codec.NewDecoderBytes(out.Data, &mh)
|
||||
if err := dec.Decode(&r); err == nil {
|
||||
stmt := `
|
||||
UPDATE agents_agent
|
||||
SET version=$1, hostname=$2, operating_system=$3,
|
||||
plat=$4, total_ram=$5, boot_time=$6, needs_reboot=$7, logged_in_username=$8
|
||||
WHERE agents_agent.id=$9;`
|
||||
|
||||
_, err = db.Exec(stmt, r.Version, r.Hostname, r.OS, r.Platform, r.TotalRAM, r.BootTime, r.RebootNeeded, r.Username, pk)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
|
||||
if r.Username != "None" {
|
||||
stmt = `UPDATE agents_agent SET last_logged_in_user=$1 WHERE agents_agent.id=$2;`
|
||||
_, err = db.Exec(stmt, r.Username, pk)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}(a.AgentID, a.ID, nc, &wg, db)
|
||||
}
|
||||
wg.Wait()
|
||||
db.Close()
|
||||
}
|
||||
|
||||
func GetWMI(file string) {
|
||||
var result JsonFile
|
||||
var payload []byte
|
||||
var mh codec.MsgpackHandle
|
||||
mh.RawToString = true
|
||||
ret := codec.NewEncoderBytes(&payload, new(codec.MsgpackHandle))
|
||||
ret.Encode(map[string]string{"func": "wmi"})
|
||||
|
||||
jret, _ := ioutil.ReadFile(file)
|
||||
err := json.Unmarshal(jret, &result)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
|
||||
opts := setupNatsOptions(result.Key)
|
||||
|
||||
nc, err := nats.Connect(result.NatsURL, opts...)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
defer nc.Close()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(len(result.Agents))
|
||||
|
||||
for _, id := range result.Agents {
|
||||
go func(id string, nc *nats.Conn, wg *sync.WaitGroup) {
|
||||
defer wg.Done()
|
||||
time.Sleep(time.Duration(randRange(0, 28)) * time.Second)
|
||||
nc.Publish(id, payload)
|
||||
}(id, nc, &wg)
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
func randRange(min, max int) int {
|
||||
rand.Seed(time.Now().UnixNano())
|
||||
return rand.Intn(max-min) + min
|
||||
}
|
||||
|
||||
type AgentInfoRet struct {
|
||||
AgentPK int `json:"id"`
|
||||
Version string `json:"version"`
|
||||
Username string `json:"logged_in_username"`
|
||||
Hostname string `json:"hostname"`
|
||||
OS string `json:"operating_system"`
|
||||
Platform string `json:"plat"`
|
||||
TotalRAM float64 `json:"total_ram"`
|
||||
BootTime int64 `json:"boot_time"`
|
||||
RebootNeeded bool `json:"needs_reboot"`
|
||||
}
|
||||
natsapi/types.go
@@ -0,0 +1,16 @@
|
||||
package api
|
||||
|
||||
type Agent struct {
|
||||
ID int `db:"id"`
|
||||
AgentID string `db:"agent_id"`
|
||||
}
|
||||
|
||||
type DjangoConfig struct {
|
||||
Key string `json:"key"`
|
||||
NatsURL string `json:"natsurl"`
|
||||
User string `json:"user"`
|
||||
Pass string `json:"pass"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
DBName string `json:"dbname"`
|
||||
}
|
||||
natsapi/utils.go
@@ -0,0 +1,53 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
_ "github.com/lib/pq"
|
||||
nats "github.com/nats-io/nats.go"
|
||||
trmm "github.com/wh1te909/trmm-shared"
|
||||
)
|
||||
|
||||
func setupNatsOptions(key string) []nats.Option {
|
||||
opts := []nats.Option{
|
||||
nats.Name("TacticalRMM"),
|
||||
nats.UserInfo("tacticalrmm", key),
|
||||
nats.ReconnectWait(time.Second * 2),
|
||||
nats.RetryOnFailedConnect(true),
|
||||
nats.MaxReconnects(-1),
|
||||
nats.ReconnectBufSize(-1),
|
||||
}
|
||||
return opts
|
||||
}
|
||||
|
||||
func GetConfig(cfg string) (db *sqlx.DB, r DjangoConfig, err error) {
|
||||
if cfg == "" {
|
||||
cfg = "/rmm/api/tacticalrmm/nats-api.conf"
|
||||
if !trmm.FileExists(cfg) {
|
||||
err = errors.New("unable to find config file")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
jret, _ := ioutil.ReadFile(cfg)
|
||||
err = json.Unmarshal(jret, &r)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
psqlInfo := fmt.Sprintf("host=%s port=%d user=%s "+
|
||||
"password=%s dbname=%s sslmode=disable",
|
||||
r.Host, r.Port, r.User, r.Pass, r.DBName)
|
||||
|
||||
db, err = sqlx.Connect("postgres", psqlInfo)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
db.SetMaxOpenConns(20)
|
||||
return
|
||||
}
|
||||
restore.sh
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPT_VERSION="31"
|
||||
SCRIPT_VERSION="32"
|
||||
SCRIPT_URL='https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh'
|
||||
|
||||
sudo apt update
|
||||
@@ -39,20 +39,22 @@ if [ ! "$osname" = "ubuntu" ] && [ ! "$osname" = "debian" ]; then
|
||||
fi
|
||||
|
||||
# determine system
|
||||
if ([ "$osname" = "ubuntu" ] && [ "$fullrelno" = "20.04" ]) || ([ "$osname" = "debian" ] && [ $relno -eq 10 ]); then
|
||||
if ([ "$osname" = "ubuntu" ] && [ "$fullrelno" = "20.04" ]) || ([ "$osname" = "debian" ] && [ $relno -ge 10 ]); then
|
||||
echo $fullrel
|
||||
else
|
||||
echo $fullrel
|
||||
echo -ne "${RED}Only Ubuntu release 20.04 and Debian 10 are supported\n"
|
||||
echo -ne "${RED}Supported versions: Ubuntu 20.04, Debian 10 and 11\n"
|
||||
echo -ne "Your system does not appear to be supported${NC}\n"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ([ "$osname" = "ubuntu" ]); then
|
||||
mongodb_repo="deb [arch=amd64] https://repo.mongodb.org/apt/$osname $codename/mongodb-org/4.4 multiverse"
|
||||
# there is no bullseye repo yet for mongo so just use buster on debian 11
|
||||
elif ([ "$osname" = "debian" ] && [ $relno -eq 11 ]); then
|
||||
mongodb_repo="deb [arch=amd64] https://repo.mongodb.org/apt/$osname buster/mongodb-org/4.4 main"
|
||||
else
|
||||
mongodb_repo="deb [arch=amd64] https://repo.mongodb.org/apt/$osname $codename/mongodb-org/4.4 main"
|
||||
|
||||
fi
|
||||
|
||||
postgresql_repo="deb [arch=amd64] https://apt.postgresql.org/pub/repos/apt/ $codename-pgdg main"
|
||||
@@ -164,14 +166,14 @@ print_green 'Installing Python 3.9'
|
||||
sudo apt install -y build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libreadline-dev libffi-dev libsqlite3-dev libbz2-dev
|
||||
numprocs=$(nproc)
|
||||
cd ~
|
||||
wget https://www.python.org/ftp/python/3.9.6/Python-3.9.6.tgz
|
||||
tar -xf Python-3.9.6.tgz
|
||||
cd Python-3.9.6
|
||||
wget https://www.python.org/ftp/python/3.9.9/Python-3.9.9.tgz
|
||||
tar -xf Python-3.9.9.tgz
|
||||
cd Python-3.9.9
|
||||
./configure --enable-optimizations
|
||||
make -j $numprocs
|
||||
sudo make altinstall
|
||||
cd ~
|
||||
sudo rm -rf Python-3.9.6 Python-3.9.6.tgz
|
||||
sudo rm -rf Python-3.9.9 Python-3.9.9.tgz
|
||||
|
||||
|
||||
print_green 'Installing redis and git'
|
||||
@@ -304,6 +306,7 @@ pip install --no-cache-dir setuptools==${SETUPTOOLS_VER} wheel==${WHEEL_VER}
|
||||
pip install --no-cache-dir -r /rmm/api/tacticalrmm/requirements.txt
|
||||
python manage.py migrate
|
||||
python manage.py collectstatic --no-input
|
||||
python manage.py create_natsapi_conf
|
||||
python manage.py reload_nats
|
||||
deactivate
|
||||
|
||||
@@ -333,7 +336,7 @@ sudo chown -R $USER:$GROUP /home/${USER}/.cache
|
||||
print_green 'Enabling Services'
|
||||
sudo systemctl daemon-reload
|
||||
|
||||
for i in celery.service celerybeat.service rmm.service daphne.service nginx
|
||||
for i in celery.service celerybeat.service rmm.service daphne.service nats-api.service nginx
|
||||
do
|
||||
sudo systemctl enable ${i}
|
||||
sudo systemctl stop ${i}
|
||||
|
||||
scripts/Win_Bluescreen_Report.ps1
@@ -0,0 +1,37 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Bluescreen - Reports bluescreens
|
||||
.DESCRIPTION
|
||||
This will check for bluescreen events on your system. If a parameter is provided, it goes back that number of days
|
||||
.EXAMPLE
|
||||
365
|
||||
.NOTES
|
||||
v1 bbrendon 2/2021
|
||||
v1.1 silversword updating with parameters 11/2021
|
||||
#>
|
||||
|
||||
|
||||
$param1 = $args[0]
|
||||
|
||||
$ErrorActionPreference = 'silentlycontinue'
|
||||
if ($Args.Count -eq 0) {
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
}
|
||||
else {
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day $param1)
|
||||
}
|
||||
|
||||
|
||||
if (Get-WinEvent -FilterHashtable @{LogName = 'application'; ID = '1001'; ProviderName = 'Windows Error Reporting'; Level = 4; Data = 'BlueScreen'; StartTime = $TimeSpan }) {
|
||||
Write-Output "There has been bluescreen events detected on your system"
|
||||
Get-WinEvent -FilterHashtable @{LogName = 'application'; ID = '1001'; ProviderName = 'Windows Error Reporting'; Level = 4; Data = 'BlueScreen'; StartTime = $TimeSpan }
|
||||
exit 1
|
||||
}
|
||||
|
||||
else {
    Write-Output "No bluescreen events detected in the past 24 hours."
    exit 0
}
|
||||
|
||||
Exit $LASTEXITCODE
|
||||
@@ -1,20 +0,0 @@
|
||||
# This will check for Bluescreen events on your system
|
||||
|
||||
$ErrorActionPreference= 'silentlycontinue'
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
|
||||
if (Get-WinEvent -FilterHashtable @{LogName='application';ID='1001';ProviderName='Windows Error Reporting';Level=4;Data='BlueScreen';StartTime=$TimeSpan})
|
||||
|
||||
{
|
||||
Write-Output "There has been bluescreen events detected on your system"
|
||||
Get-WinEvent -FilterHashtable @{LogName='application';ID='1001';ProviderName='Windows Error Reporting';Level=4;Data='BlueScreen';StartTime=$TimeSpan}
|
||||
exit 1
|
||||
}
|
||||
|
||||
{
|
||||
else
|
||||
Write-Output "No bluescreen events detected in the past 24 hours."
|
||||
exit 0
|
||||
}
|
||||
|
||||
Exit $LASTEXITCODE
|
||||
scripts/Win_Chocolatey_List_Installed.bat
@@ -0,0 +1,3 @@
|
||||
rem List apps installed by Chocolatey
|
||||
|
||||
choco list --localonly
|
||||
@@ -10,48 +10,54 @@
|
||||
.PARAMETER PackageName
|
||||
Use this to specify which software to install eg: PackageName googlechrome
|
||||
.EXAMPLE
|
||||
Hosts 20 PackageName googlechrome
|
||||
-Hosts 20 -PackageName googlechrome
|
||||
.EXAMPLE
|
||||
Mode upgrade Hosts 50
|
||||
-Mode upgrade -Hosts 50
|
||||
.EXAMPLE
|
||||
Mode uninstall PackageName googlechrome
|
||||
-Mode uninstall -PackageName googlechrome
|
||||
.NOTES
|
||||
9/2021 v1 Initial release by @silversword411 and @bradhawkins
|
||||
11/14/2021 v1.1 Fixing typos and logic flow
|
||||
#>
|
||||
|
||||
param (
|
||||
[string] $Hosts = "0",
|
||||
[Int] $Hosts = "0",
|
||||
[string] $PackageName,
|
||||
[string] $Mode = "install",
|
||||
[string] $Mode = "install"
|
||||
)
|
||||
|
||||
$ErrorCount = 0
|
||||
|
||||
if (!$PackageName) {
|
||||
write-output "No choco package name provided, please include Example: `"PackageName googlechrome`" `n"
|
||||
$ErrorCount += 1
|
||||
if ($Mode -ne "upgrade" -and !$PackageName) {
|
||||
write-output "No choco package name provided, please include Example: `"-PackageName googlechrome`" `n"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
if (!$Mode -eq "upgrade") {
|
||||
$randrange = ($Hosts + 1) * 10
|
||||
if ($Hosts -ne "0") {
|
||||
$randrange = ($Hosts + 1) * 6
|
||||
# Write-Output "Calculating rnd"
|
||||
# Write-Output "randrange $randrange"
|
||||
$rnd = Get-Random -Minimum 1 -Maximum $randrange;
|
||||
Start-Sleep -Seconds $rnd;
|
||||
choco ugrade -y all
|
||||
Write-Output "Running upgrade"
|
||||
Exit 0
|
||||
}
|
||||
|
||||
if (!$Hosts -eq "0") {
|
||||
write-output "No Hosts Specified, running concurrently"
|
||||
choco $Mode $PackageName -y
|
||||
Exit 0
|
||||
# Write-Output "rnd=$rnd"
|
||||
}
|
||||
else {
|
||||
$randrange = ($Hosts + 1) * 6
|
||||
$rnd = Get-Random -Minimum 1 -Maximum $randrange;
|
||||
$rnd = "1"
|
||||
# Write-Output "rnd set to 1 manually"
|
||||
# Write-Output "rnd=$rnd"
|
||||
}
|
||||
|
||||
if ($Mode -eq "upgrade") {
|
||||
# Write-Output "Starting Upgrade"
|
||||
Start-Sleep -Seconds $rnd;
|
||||
choco $Mode $PackageName -y
|
||||
choco upgrade -y all
|
||||
# Write-Output "Running upgrade"
|
||||
Exit 0
|
||||
}
|
||||
|
||||
# write-output "Running install/uninstall mode"
|
||||
Start-Sleep -Seconds $rnd;
|
||||
choco $Mode $PackageName -y
|
||||
Exit 0
|
||||
|
||||
|
||||
Exit $LASTEXITCODE
|
||||
@@ -1,3 +1,21 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Enables Windows Defender and sets preferences to lock Defender down
|
||||
.DESCRIPTION
|
||||
Windows Defender in its default configuration does basic protections. Running this script will enable many additional settings to increase security.
|
||||
.PARAMETER NoControlledFolders
|
||||
Adding this parameter will not enable Controlled Folders
|
||||
.EXAMPLE
|
||||
-NoControlledFolders
|
||||
.NOTES
|
||||
9/2021 v1 Initial release dinger1986
|
||||
11/24/2021 v1.1 adding command parameters for Controlled Folder access by Tremor and silversword
|
||||
#>
|
||||
|
||||
param (
|
||||
[switch] $NoControlledFolders
|
||||
)
|
||||
|
||||
# Verifies that script is running on Windows 10 or greater
|
||||
function Check-IsWindows10
|
||||
{
|
||||
@@ -92,8 +110,14 @@ if (!(Check-IsWindows10-1709))
|
||||
|
||||
Write-Host # `nUpdating Windows Defender Exploit Guard settings`n# -ForegroundColor Green
|
||||
|
||||
Write-Host # Enabling Controlled Folder Access and setting to block mode#
|
||||
Set-MpPreference -EnableControlledFolderAccess Enabled
|
||||
if ($NoControlledFolders) # Check if user has run with -NoControlledFolders parameter
|
||||
{
|
||||
Write-Host "Skipping enabling Controlled folders"
|
||||
}
|
||||
else {
|
||||
Write-Host "Enabling Controlled folders"
|
||||
Set-MpPreference -EnableControlledFolderAccess Enabled
|
||||
}
|
||||
|
||||
Write-Host # Enabling Network Protection and setting to block mode#
|
||||
Set-MpPreference -EnableNetworkProtection Enabled
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
Write-Host "Running Windows Defender Full Scan in Background" -ForegroundColor Green
|
||||
Write-Host "Running Windows Defender Full Scan in Background" -ForegroundColor Green
|
||||
Start-MpScan -ScanPath C:\ -ScanType FullScan -AsJob
|
||||
@@ -1,2 +1,2 @@
|
||||
Write-Host "Running Windows Defender Quick Scan in Background" -ForegroundColor Green
|
||||
Write-Host "Running Windows Defender Quick Scan in Background" -ForegroundColor Green
|
||||
Start-MpScan -ScanType QuickScan -AsJob
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
This will check Event Log for Windows Defender Malware and Antispyware reports, otherwise will report as Healthy. By default if no command parameter is provided it will check the last 1 day (good for a scheduled daily task).
|
||||
If a number is provided as a command parameter it will search back that number of days back provided (good for collecting all AV alerts on the computer).
|
||||
.EXAMPLE
|
||||
Win_Defender_Status_reports.ps1 365
|
||||
365
|
||||
.NOTES
|
||||
v1 dinger initial release 2021
|
||||
v1.1 bdrayer Adding full message output if items found
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
# Checks local disks for errors reported in event viewer within the last 24 hours
|
||||
|
||||
$ErrorActionPreference = 'silentlycontinue'
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
if (Get-WinEvent -FilterHashtable @{LogName = 'system'; ID = '11', '9', '15', '52', '129', '7', '98'; Level = 2, 3; ProviderName = '*disk*', '*storsvc*', '*ntfs*'; StartTime = $TimeSpan } -MaxEvents 10 | Where-Object -Property Message -Match Volume*)
|
||||
{
|
||||
Write-Output "Disk errors detected please investigate"
|
||||
Get-WinEvent -FilterHashtable @{LogName = 'system'; ID = '11', '9', '15', '52', '129', '7', '98'; Level = 2, 3; ProviderName = '*disk*', '*storsvc*', '*ntfs*'; StartTime = $TimeSpan }
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
else {
|
||||
Write-Output "Disks are Healthy"
|
||||
exit 0
|
||||
}
|
||||
|
||||
|
||||
Exit $LASTEXITCODE
|
||||
# Checks local disks for errors reported in event viewer within the last 24 hours
|
||||
|
||||
$ErrorActionPreference = 'silentlycontinue'
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
if (Get-WinEvent -FilterHashtable @{LogName = 'system'; ID = '11', '9', '15', '52', '129', '7', '98'; Level = 2, 3; ProviderName = '*disk*', '*storsvc*', '*ntfs*'; StartTime = $TimeSpan } -MaxEvents 10 | Where-Object -Property Message -Match Volume*)
|
||||
{
|
||||
Write-Output "Disk errors detected please investigate"
|
||||
Get-WinEvent -FilterHashtable @{LogName = 'system'; ID = '11', '9', '15', '52', '129', '7', '98'; Level = 2, 3; ProviderName = '*disk*', '*storsvc*', '*ntfs*'; StartTime = $TimeSpan }
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
else {
|
||||
Write-Output "Disks are Healthy"
|
||||
exit 0
|
||||
}
|
||||
|
||||
|
||||
Exit $LASTEXITCODE
|
||||
@@ -1,2 +1,2 @@
|
||||
#Install Adobe Reader DC
|
||||
#Install Adobe Reader DC
|
||||
choco install adobereader -params '"/EnableUpdateService /UpdateMode:3 /DesktopIcon"' --yes --no-progress --force
|
||||
@@ -1,16 +1,32 @@
|
||||
$ErrorActionPreference= 'silentlycontinue'
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
if (Get-WinEvent -FilterHashtable @{LogName='security';ID='4720','4720','4728','4732','4756','4767';StartTime=$TimeSpan})
|
||||
{
|
||||
Write-Output "A change has been made to local users"
|
||||
Get-WinEvent -FilterHashtable @{LogName='security';ID='4720','4720','4728','4732','4756','4767';StartTime=$TimeSpan}
|
||||
exit 1
|
||||
<#
|
||||
.Synopsis
|
||||
Event Viewer - New User Notification
|
||||
.DESCRIPTION
|
||||
Event Viewer Monitor - Notify when new Local user is created
|
||||
.EXAMPLE
|
||||
365
|
||||
.NOTES
|
||||
v1 dinger initial release
|
||||
v1.1 silversword adding parameter options 11/2021
|
||||
#>
|
||||
|
||||
$ErrorActionPreference = 'silentlycontinue'
|
||||
if ($Args.Count -eq 0) {
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
|
||||
}
|
||||
else {
|
||||
$TimeSpan = (Get-Date) - (New-TimeSpan -Day $param1)
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
Write-Output "No changes all looks fine"
|
||||
exit 0
|
||||
if (Get-WinEvent -FilterHashtable @{LogName = 'security'; ID = '4720', '4720', '4728', '4732', '4756', '4767'; StartTime = $TimeSpan }) {
|
||||
Write-Output "A change has been made to local users"
|
||||
Get-WinEvent -FilterHashtable @{LogName = 'security'; ID = '4720', '4720', '4728', '4732', '4756', '4767'; StartTime = $TimeSpan }
|
||||
exit 1
|
||||
}
|
||||
|
||||
else {
|
||||
Write-Output "No changes all looks fine"
|
||||
exit 0
|
||||
}
|
||||
|
||||
|
||||
|
||||
scripts/Win_Printer_ClearandRestart.bat
@@ -0,0 +1,9 @@
|
||||
@echo off
|
||||
|
||||
sc stop spooler
|
||||
|
||||
timeout /t 5 /nobreak > NUL
|
||||
|
||||
del C:\Windows\System32\spool\printers\* /Q /F /S
|
||||
|
||||
sc start spooler
|
||||
scripts/Win_Software_Install_Report.ps1
@@ -0,0 +1,35 @@
|
||||
<#
.Synopsis
    Software Install - Reports new installs
.DESCRIPTION
    This will check for software install events in the application Event Viewer log.
    If a number is provided as a command parameter it will search that number of days back.
.EXAMPLE
    365
.NOTES
    v1 silversword initial release 11/2021
#>

$param1 = $args[0]

$ErrorActionPreference = 'silentlycontinue'
if ($Args.Count -eq 0) {
    $TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
}
else {
    $TimeSpan = (Get-Date) - (New-TimeSpan -Day $param1)
}

if (Get-WinEvent -FilterHashtable @{LogName = 'application'; ID = '11707'; StartTime = $TimeSpan }) {
    Write-Output "Software installed"
    Get-WinEvent -FilterHashtable @{LogName = 'application'; ID = '11707'; StartTime = $TimeSpan }
    exit 1
}

else {
    Write-Output "No Software install events detected in the past 24 hours."
    exit 0
}

Exit $LASTEXITCODE
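A quick way to exercise the report locally might look like this; the 30 is just an example look-back window and the path assumes the script has been saved to the current directory:

# Hypothetical local test run: search the last 30 days, then inspect the exit code.
powershell.exe -ExecutionPolicy Bypass -File .\Win_Software_Install_Report.ps1 30
Write-Output "Exit code: $LASTEXITCODE"   # 1 = installs found, 0 = nothing in the window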
10 scripts/Win_Splashtop_Get_ID.ps1 Normal file
@@ -0,0 +1,10 @@
# Retrieve Splashtop SUUID from device registry.

if (!$ErrorCount -eq 0) {
    exit 1
}


$key = 'HKLM:\SOFTWARE\WOW6432Node\Splashtop Inc.\Splashtop Remote Server'
(Get-ItemProperty -Path $key -Name SUUID).SUUID
Write-Output $key.SUUID
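If the registry key can be missing (for example when the Splashtop streamer is not installed), a more defensive variant could guard the read; this is a sketch of that assumption, not what the commit adds:

# Hypothetical defensive variant: only read SUUID when the key exists.
if (Test-Path $key) {
    (Get-ItemProperty -Path $key -Name SUUID -ErrorAction SilentlyContinue).SUUID
}
else {
    Write-Output "Splashtop Remote Server registry key not found."
    exit 1
}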
@@ -1,4 +1,4 @@
Function Start-Cleanup {
<#
.SYNOPSIS
    Automate cleaning up a C:\ drive with low disk space
16 scripts/Win_Storage_CheckPools.ps1 Normal file
@@ -0,0 +1,16 @@
$pools = Get-VirtualDisk | select -ExpandProperty HealthStatus

$err = $False

ForEach ($pool in $pools) {
    if ($pool -ne "Healthy") {
        $err = $True
    }
}

if ($err) {
    exit 1
}
else {
    exit 0
}
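When this check fails it only returns exit code 1; for troubleshooting, a variant could also name the unhealthy virtual disks before failing. This is an illustration, not part of the commit:

# Hypothetical variant: report which virtual disks are unhealthy, then fail.
$unhealthy = Get-VirtualDisk | Where-Object { $_.HealthStatus -ne 'Healthy' }
if ($unhealthy) {
    $unhealthy | Select-Object FriendlyName, HealthStatus | Format-Table -AutoSize
    exit 1
}
exit 0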
@@ -1,17 +1,34 @@
-$ErrorActionPreference= 'silentlycontinue'
-$TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
-if (Get-WinEvent -FilterHashtable @{LogName='Microsoft-Windows-TaskScheduler/Operational';ID='106';StartTime=$TimeSpan} | Where-Object -Property Message -notlike *$env:COMPUTERNAME*)
-{
-Write-Output "New Task Has Been Added"
-Get-WinEvent -FilterHashtable @{LogName='Microsoft-Windows-TaskScheduler/Operational';ID='106';StartTime=$TimeSpan}
-Get-WinEvent -FilterHashtable @{LogName='Microsoft-Windows-TaskScheduler/Operational';ID='141';StartTime=$TimeSpan}
-exit 1
-else
-{
-Write-Output "No changes with Task Scheduler"
-exit 0
+<#
+.Synopsis
+    Event Viewer - Task Scheduler New Item Notification
+.DESCRIPTION
+    Event Viewer Monitor - Notify when new Task Scheduler item is created
+.EXAMPLE
+    365
+.NOTES
+    v1 dinger initial release
+    v1.1 silversword adding command parameters 11/2021
+#>
+
+$ErrorActionPreference = 'silentlycontinue'
+if ($Args.Count -eq 0) {
+    $TimeSpan = (Get-Date) - (New-TimeSpan -Day 1)
+}
+else {
+    $TimeSpan = (Get-Date) - (New-TimeSpan -Day $param1)
+}
+
+if (Get-WinEvent -FilterHashtable @{LogName = 'Microsoft-Windows-TaskScheduler/Operational'; ID = '106'; StartTime = $TimeSpan } | Where-Object -Property Message -notlike *$env:COMPUTERNAME*) {
+    Write-Output "New Task Has Been Added"
+    Get-WinEvent -FilterHashtable @{LogName = 'Microsoft-Windows-TaskScheduler/Operational'; ID = '106'; StartTime = $TimeSpan }
+    Get-WinEvent -FilterHashtable @{LogName = 'Microsoft-Windows-TaskScheduler/Operational'; ID = '141'; StartTime = $TimeSpan }
+    exit 1
+}
+
+else {
+    Write-Output "No changes with Task Scheduler"
+    exit 0
+}
29 scripts/Win_User_EnableDisable.ps1 Normal file
@@ -0,0 +1,29 @@
<#
.SYNOPSIS
    User - Enable or disable a user
.DESCRIPTION
    Used to enable or disable a local user
.PARAMETER Name
    Required: Username
.PARAMETER Enabled
    Required: yes/no
.EXAMPLE
    -Name user -Enabled no
.NOTES
    11/15/2021 v1 Initial release by @silversword411
#>

param (
    [string] $Name,
    [string] $Enabled
)

if (!$Enabled -or !$Name) {
    Write-Output "Missing required parameters. Example: `"-Name username -Enabled yes/no`"`n"
    Exit 1
}
else {
    net user $Name /active:$Enabled
    Write-Output "$Name set as active:$Enabled"
    Exit 0
}
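A quick local sanity check might look like the following; the account name testuser is only an example:

# Hypothetical test run: disable, then re-enable, a local account.
.\Win_User_EnableDisable.ps1 -Name testuser -Enabled no
.\Win_User_EnableDisable.ps1 -Name testuser -Enabled yes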
484 scripts/Win_Win11_Ready.ps1 Normal file
@@ -0,0 +1,484 @@
#=============================================================================================================================
#
#Script to check if a machine is ready for Windows 11
#Returns 'Not Windows 11 Ready' if any of the checks fail, and returns 'Windows 11 Ready' if they all pass.
#Useful if running in an automation policy and want to populate a custom field of all agents with their readiness.
#This is a modified version of the official Microsoft script here: https://aka.ms/HWReadinessScript
#
#=============================================================================================================================

$exitCode = 0

[int]$MinOSDiskSizeGB = 64
[int]$MinMemoryGB = 4
[Uint32]$MinClockSpeedMHz = 1000
[Uint32]$MinLogicalCores = 2
[Uint16]$RequiredAddressWidth = 64

$PASS_STRING = "PASS"
$FAIL_STRING = "FAIL"
$FAILED_TO_RUN_STRING = "FAILED TO RUN"
$UNDETERMINED_CAPS_STRING = "UNDETERMINED"
$UNDETERMINED_STRING = "Undetermined"
$CAPABLE_STRING = "Capable"
$NOT_CAPABLE_STRING = "Not capable"
$CAPABLE_CAPS_STRING = "CAPABLE"
$NOT_CAPABLE_CAPS_STRING = "NOT CAPABLE"
$STORAGE_STRING = "Storage"
$OS_DISK_SIZE_STRING = "OSDiskSize"
$MEMORY_STRING = "Memory"
$SYSTEM_MEMORY_STRING = "System_Memory"
$GB_UNIT_STRING = "GB"
$TPM_STRING = "TPM"
$TPM_VERSION_STRING = "TPMVersion"
$PROCESSOR_STRING = "Processor"
$SECUREBOOT_STRING = "SecureBoot"
$I7_7820HQ_CPU_STRING = "i7-7820hq CPU"

# 0=name of check, 1=attribute checked, 2=value, 3=PASS/FAIL/UNDETERMINED
$logFormat = '{0}: {1}={2}. {3}; '

# 0=name of check, 1=attribute checked, 2=value, 3=unit of the value, 4=PASS/FAIL/UNDETERMINED
$logFormatWithUnit = '{0}: {1}={2}{3}. {4}; '

# 0=name of check.
$logFormatReturnReason = '{0}, '

# 0=exception.
$logFormatException = '{0}; '

# 0=name of check, 1=attribute checked and its value, 2=PASS/FAIL/UNDETERMINED
$logFormatWithBlob = '{0}: {1}. {2}; '

# returnCode is -1 when an exception is thrown, 1 if a value does not meet requirements, 0 if successful, and -2 (the default) if the script didn't run.
$outObject = @{ returnCode = -2; returnResult = $FAILED_TO_RUN_STRING; returnReason = ""; logging = "" }
# NOT CAPABLE(1) state takes precedence over UNDETERMINED(-1) state
function Private:UpdateReturnCode {
    param(
        [Parameter(Mandatory = $true)]
        [ValidateRange(-2, 1)]
        [int] $ReturnCode
    )

    Switch ($ReturnCode) {

        0 {
            if ($outObject.returnCode -eq -2) {
                $outObject.returnCode = $ReturnCode
            }
        }
        1 {
            $outObject.returnCode = $ReturnCode
        }
        -1 {
            if ($outObject.returnCode -ne 1) {
                $outObject.returnCode = $ReturnCode
            }
        }
    }
}
$Source = @"
using Microsoft.Win32;
using System;
using System.Runtime.InteropServices;

public class CpuFamilyResult
{
    public bool IsValid { get; set; }
    public string Message { get; set; }
}

public class CpuFamily
{
    [StructLayout(LayoutKind.Sequential)]
    public struct SYSTEM_INFO
    {
        public ushort ProcessorArchitecture;
        ushort Reserved;
        public uint PageSize;
        public IntPtr MinimumApplicationAddress;
        public IntPtr MaximumApplicationAddress;
        public IntPtr ActiveProcessorMask;
        public uint NumberOfProcessors;
        public uint ProcessorType;
        public uint AllocationGranularity;
        public ushort ProcessorLevel;
        public ushort ProcessorRevision;
    }

    [DllImport("kernel32.dll")]
    internal static extern void GetNativeSystemInfo(ref SYSTEM_INFO lpSystemInfo);

    public enum ProcessorFeature : uint
    {
        ARM_SUPPORTED_INSTRUCTIONS = 34
    }

    [DllImport("kernel32.dll")]
    [return: MarshalAs(UnmanagedType.Bool)]
    static extern bool IsProcessorFeaturePresent(ProcessorFeature processorFeature);

    private const ushort PROCESSOR_ARCHITECTURE_X86 = 0;
    private const ushort PROCESSOR_ARCHITECTURE_ARM64 = 12;
    private const ushort PROCESSOR_ARCHITECTURE_X64 = 9;

    private const string INTEL_MANUFACTURER = "GenuineIntel";
    private const string AMD_MANUFACTURER = "AuthenticAMD";
    private const string QUALCOMM_MANUFACTURER = "Qualcomm Technologies Inc";

    public static CpuFamilyResult Validate(string manufacturer, ushort processorArchitecture)
    {
        CpuFamilyResult cpuFamilyResult = new CpuFamilyResult();

        if (string.IsNullOrWhiteSpace(manufacturer))
        {
            cpuFamilyResult.IsValid = false;
            cpuFamilyResult.Message = "Manufacturer is null or empty";
            return cpuFamilyResult;
        }

        string registryPath = "HKEY_LOCAL_MACHINE\\Hardware\\Description\\System\\CentralProcessor\\0";
        SYSTEM_INFO sysInfo = new SYSTEM_INFO();
        GetNativeSystemInfo(ref sysInfo);

        switch (processorArchitecture)
        {
            case PROCESSOR_ARCHITECTURE_ARM64:

                if (manufacturer.Equals(QUALCOMM_MANUFACTURER, StringComparison.OrdinalIgnoreCase))
                {
                    bool isArmv81Supported = IsProcessorFeaturePresent(ProcessorFeature.ARM_SUPPORTED_INSTRUCTIONS);

                    if (!isArmv81Supported)
                    {
                        string registryName = "CP 4030";
                        long registryValue = (long)Registry.GetValue(registryPath, registryName, -1);
                        long atomicResult = (registryValue >> 20) & 0xF;

                        if (atomicResult >= 2)
                        {
                            isArmv81Supported = true;
                        }
                    }

                    cpuFamilyResult.IsValid = isArmv81Supported;
                    cpuFamilyResult.Message = isArmv81Supported ? "" : "Processor does not implement ARM v8.1 atomic instruction";
                }
                else
                {
                    cpuFamilyResult.IsValid = false;
                    cpuFamilyResult.Message = "The processor isn't currently supported for Windows 11";
                }

                break;

            case PROCESSOR_ARCHITECTURE_X64:
            case PROCESSOR_ARCHITECTURE_X86:

                int cpuFamily = sysInfo.ProcessorLevel;
                int cpuModel = (sysInfo.ProcessorRevision >> 8) & 0xFF;
                int cpuStepping = sysInfo.ProcessorRevision & 0xFF;

                if (manufacturer.Equals(INTEL_MANUFACTURER, StringComparison.OrdinalIgnoreCase))
                {
                    try
                    {
                        cpuFamilyResult.IsValid = true;
                        cpuFamilyResult.Message = "";

                        if (cpuFamily == 6)
                        {
                            if (cpuModel <= 95 && cpuModel != 85)
                            {
                                cpuFamilyResult.IsValid = false;
                                cpuFamilyResult.Message = "";
                            }
                            else if ((cpuModel == 142 || cpuModel == 158) && cpuStepping == 9)
                            {
                                string registryName = "Platform Specific Field 1";
                                int registryValue = (int)Registry.GetValue(registryPath, registryName, -1);

                                if ((cpuModel == 142 && registryValue != 16) || (cpuModel == 158 && registryValue != 8))
                                {
                                    cpuFamilyResult.IsValid = false;
                                }
                                cpuFamilyResult.Message = "PlatformId " + registryValue;
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        cpuFamilyResult.IsValid = false;
                        cpuFamilyResult.Message = "Exception:" + ex.GetType().Name;
                    }
                }
                else if (manufacturer.Equals(AMD_MANUFACTURER, StringComparison.OrdinalIgnoreCase))
                {
                    cpuFamilyResult.IsValid = true;
                    cpuFamilyResult.Message = "";

                    if (cpuFamily < 23 || (cpuFamily == 23 && (cpuModel == 1 || cpuModel == 17)))
                    {
                        cpuFamilyResult.IsValid = false;
                    }
                }
                else
                {
                    cpuFamilyResult.IsValid = false;
                    cpuFamilyResult.Message = "Unsupported Manufacturer: " + manufacturer + ", Architecture: " + processorArchitecture + ", CPUFamily: " + sysInfo.ProcessorLevel + ", ProcessorRevision: " + sysInfo.ProcessorRevision;
                }

                break;

            default:
                cpuFamilyResult.IsValid = false;
                cpuFamilyResult.Message = "Unsupported CPU category. Manufacturer: " + manufacturer + ", Architecture: " + processorArchitecture + ", CPUFamily: " + sysInfo.ProcessorLevel + ", ProcessorRevision: " + sysInfo.ProcessorRevision;
                break;
        }
        return cpuFamilyResult;
    }
}
"@
# Storage
try {
    $osDrive = Get-WmiObject -Class Win32_OperatingSystem | Select-Object -Property SystemDrive
    $osDriveSize = Get-WmiObject -Class Win32_LogicalDisk -filter "DeviceID='$($osDrive.SystemDrive)'" | Select-Object @{Name = "SizeGB"; Expression = { $_.Size / 1GB -as [int] } }

    if ($null -eq $osDriveSize) {
        UpdateReturnCode -ReturnCode 1
        $outObject.returnReason += $logFormatReturnReason -f $STORAGE_STRING
        $outObject.logging += $logFormatWithBlob -f $STORAGE_STRING, "Storage is null", $FAIL_STRING
        $exitCode = 1
    }
    elseif ($osDriveSize.SizeGB -lt $MinOSDiskSizeGB) {
        UpdateReturnCode -ReturnCode 1
        $outObject.returnReason += $logFormatReturnReason -f $STORAGE_STRING
        $outObject.logging += $logFormatWithUnit -f $STORAGE_STRING, $OS_DISK_SIZE_STRING, ($osDriveSize.SizeGB), $GB_UNIT_STRING, $FAIL_STRING
        $exitCode = 1
    }
    else {
        $outObject.logging += $logFormatWithUnit -f $STORAGE_STRING, $OS_DISK_SIZE_STRING, ($osDriveSize.SizeGB), $GB_UNIT_STRING, $PASS_STRING
        UpdateReturnCode -ReturnCode 0
    }
}
catch {
    UpdateReturnCode -ReturnCode -1
    $outObject.logging += $logFormat -f $STORAGE_STRING, $OS_DISK_SIZE_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
    $outObject.logging += $logFormatException -f "$($_.Exception.GetType().Name) $($_.Exception.Message)"
    $exitCode = 1
}
# Memory (bytes)
try {
    $memory = Get-WmiObject Win32_PhysicalMemory | Measure-Object -Property Capacity -Sum | Select-Object @{Name = "SizeGB"; Expression = { $_.Sum / 1GB -as [int] } }

    if ($null -eq $memory) {
        UpdateReturnCode -ReturnCode 1
        $outObject.returnReason += $logFormatReturnReason -f $MEMORY_STRING
        $outObject.logging += $logFormatWithBlob -f $MEMORY_STRING, "Memory is null", $FAIL_STRING
        $exitCode = 1
    }
    elseif ($memory.SizeGB -lt $MinMemoryGB) {
        UpdateReturnCode -ReturnCode 1
        $outObject.returnReason += $logFormatReturnReason -f $MEMORY_STRING
        $outObject.logging += $logFormatWithUnit -f $MEMORY_STRING, $SYSTEM_MEMORY_STRING, ($memory.SizeGB), $GB_UNIT_STRING, $FAIL_STRING
        $exitCode = 1
    }
    else {
        $outObject.logging += $logFormatWithUnit -f $MEMORY_STRING, $SYSTEM_MEMORY_STRING, ($memory.SizeGB), $GB_UNIT_STRING, $PASS_STRING
        UpdateReturnCode -ReturnCode 0
    }
}
catch {
    UpdateReturnCode -ReturnCode -1
    $outObject.logging += $logFormat -f $MEMORY_STRING, $SYSTEM_MEMORY_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
    $outObject.logging += $logFormatException -f "$($_.Exception.GetType().Name) $($_.Exception.Message)"
    $exitCode = 1
}
# TPM
try {
    $tpm = Get-Tpm

    if ($null -eq $tpm) {
        UpdateReturnCode -ReturnCode 1
        $outObject.returnReason += $logFormatReturnReason -f $TPM_STRING
        $outObject.logging += $logFormatWithBlob -f $TPM_STRING, "TPM is null", $FAIL_STRING
        $exitCode = 1
    }
    elseif ($tpm.TpmPresent) {
        $tpmVersion = Get-WmiObject -Class Win32_Tpm -Namespace root\CIMV2\Security\MicrosoftTpm | Select-Object -Property SpecVersion

        if ($null -eq $tpmVersion.SpecVersion) {
            UpdateReturnCode -ReturnCode 1
            $outObject.returnReason += $logFormatReturnReason -f $TPM_STRING
            $outObject.logging += $logFormat -f $TPM_STRING, $TPM_VERSION_STRING, "null", $FAIL_STRING
            $exitCode = 1
        }

        $majorVersion = $tpmVersion.SpecVersion.Split(",")[0] -as [int]
        if ($majorVersion -lt 2) {
            UpdateReturnCode -ReturnCode 1
            $outObject.returnReason += $logFormatReturnReason -f $TPM_STRING
            $outObject.logging += $logFormat -f $TPM_STRING, $TPM_VERSION_STRING, ($tpmVersion.SpecVersion), $FAIL_STRING
            $exitCode = 1
        }
        else {
            $outObject.logging += $logFormat -f $TPM_STRING, $TPM_VERSION_STRING, ($tpmVersion.SpecVersion), $PASS_STRING
            UpdateReturnCode -ReturnCode 0
        }
    }
    else {
        if ($tpm.GetType().Name -eq "String") {
            UpdateReturnCode -ReturnCode -1
            $outObject.logging += $logFormat -f $TPM_STRING, $TPM_VERSION_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
            $outObject.logging += $logFormatException -f $tpm
        }
        else {
            UpdateReturnCode -ReturnCode 1
            $outObject.returnReason += $logFormatReturnReason -f $TPM_STRING
            $outObject.logging += $logFormat -f $TPM_STRING, $TPM_VERSION_STRING, ($tpm.TpmPresent), $FAIL_STRING
        }
        $exitCode = 1
    }
}
catch {
    UpdateReturnCode -ReturnCode -1
    $outObject.logging += $logFormat -f $TPM_STRING, $TPM_VERSION_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
    $outObject.logging += $logFormatException -f "$($_.Exception.GetType().Name) $($_.Exception.Message)"
    $exitCode = 1
}
# CPU Details
$cpuDetails;
try {
    $cpuDetails = @(Get-WmiObject -Class Win32_Processor)[0]

    if ($null -eq $cpuDetails) {
        UpdateReturnCode -ReturnCode 1
        $exitCode = 1
        $outObject.returnReason += $logFormatReturnReason -f $PROCESSOR_STRING
        $outObject.logging += $logFormatWithBlob -f $PROCESSOR_STRING, "CpuDetails is null", $FAIL_STRING
    }
    else {
        $processorCheckFailed = $false

        # AddressWidth
        if ($null -eq $cpuDetails.AddressWidth -or $cpuDetails.AddressWidth -ne $RequiredAddressWidth) {
            UpdateReturnCode -ReturnCode 1
            $processorCheckFailed = $true
            $exitCode = 1
        }

        # ClockSpeed is in MHz
        if ($null -eq $cpuDetails.MaxClockSpeed -or $cpuDetails.MaxClockSpeed -le $MinClockSpeedMHz) {
            UpdateReturnCode -ReturnCode 1;
            $processorCheckFailed = $true
            $exitCode = 1
        }

        # Number of Logical Cores
        if ($null -eq $cpuDetails.NumberOfLogicalProcessors -or $cpuDetails.NumberOfLogicalProcessors -lt $MinLogicalCores) {
            UpdateReturnCode -ReturnCode 1
            $processorCheckFailed = $true
            $exitCode = 1
        }

        # CPU Family
        Add-Type -TypeDefinition $Source
        $cpuFamilyResult = [CpuFamily]::Validate([String]$cpuDetails.Manufacturer, [uint16]$cpuDetails.Architecture)

        $cpuDetailsLog = "{AddressWidth=$($cpuDetails.AddressWidth); MaxClockSpeed=$($cpuDetails.MaxClockSpeed); NumberOfLogicalCores=$($cpuDetails.NumberOfLogicalProcessors); Manufacturer=$($cpuDetails.Manufacturer); Caption=$($cpuDetails.Caption); $($cpuFamilyResult.Message)}"

        if (!$cpuFamilyResult.IsValid) {
            UpdateReturnCode -ReturnCode 1
            $processorCheckFailed = $true
            $exitCode = 1
        }

        if ($processorCheckFailed) {
            $outObject.returnReason += $logFormatReturnReason -f $PROCESSOR_STRING
            $outObject.logging += $logFormatWithBlob -f $PROCESSOR_STRING, ($cpuDetailsLog), $FAIL_STRING
        }
        else {
            $outObject.logging += $logFormatWithBlob -f $PROCESSOR_STRING, ($cpuDetailsLog), $PASS_STRING
            UpdateReturnCode -ReturnCode 0
        }
    }
}
catch {
    UpdateReturnCode -ReturnCode -1
    $outObject.logging += $logFormat -f $PROCESSOR_STRING, $PROCESSOR_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
    $outObject.logging += $logFormatException -f "$($_.Exception.GetType().Name) $($_.Exception.Message)"
    $exitCode = 1
}
# SecureBoot
try {
    $isSecureBootEnabled = Confirm-SecureBootUEFI
    $outObject.logging += $logFormatWithBlob -f $SECUREBOOT_STRING, $CAPABLE_STRING, $PASS_STRING
    UpdateReturnCode -ReturnCode 0
}
catch [System.PlatformNotSupportedException] {
    # PlatformNotSupportedException "Cmdlet not supported on this platform." - SecureBoot is not supported or this is a non-UEFI computer.
    UpdateReturnCode -ReturnCode 1
    $outObject.returnReason += $logFormatReturnReason -f $SECUREBOOT_STRING
    $outObject.logging += $logFormatWithBlob -f $SECUREBOOT_STRING, $NOT_CAPABLE_STRING, $FAIL_STRING
    $exitCode = 1
}
catch [System.UnauthorizedAccessException] {
    UpdateReturnCode -ReturnCode -1
    $outObject.logging += $logFormatWithBlob -f $SECUREBOOT_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
    $outObject.logging += $logFormatException -f "$($_.Exception.GetType().Name) $($_.Exception.Message)"
    $exitCode = 1
}
catch {
    UpdateReturnCode -ReturnCode -1
    $outObject.logging += $logFormatWithBlob -f $SECUREBOOT_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
    $outObject.logging += $logFormatException -f "$($_.Exception.GetType().Name) $($_.Exception.Message)"
    $exitCode = 1
}
# i7-7820hq CPU
try {
    $supportedDevices = @('surface studio 2', 'precision 5520')
    $systemInfo = @(Get-WmiObject -Class Win32_ComputerSystem)[0]

    if ($null -ne $cpuDetails) {
        if ($cpuDetails.Name -match 'i7-7820hq cpu @ 2.90ghz') {
            $modelOrSKUCheckLog = $systemInfo.Model.Trim()
            if ($supportedDevices -contains $modelOrSKUCheckLog) {
                $outObject.logging += $logFormatWithBlob -f $I7_7820HQ_CPU_STRING, $modelOrSKUCheckLog, $PASS_STRING
                $outObject.returnCode = 0
                $exitCode = 0
            }
        }
    }
}
catch {
    if ($outObject.returnCode -ne 0) {
        UpdateReturnCode -ReturnCode -1
        $outObject.logging += $logFormatWithBlob -f $I7_7820HQ_CPU_STRING, $UNDETERMINED_STRING, $UNDETERMINED_CAPS_STRING
        $outObject.logging += $logFormatException -f "$($_.Exception.GetType().Name) $($_.Exception.Message)"
        $exitCode = 1
    }
}
Switch ($outObject.returnCode) {

    0 { $outObject.returnResult = $CAPABLE_CAPS_STRING }
    1 { $outObject.returnResult = $NOT_CAPABLE_CAPS_STRING }
    -1 { $outObject.returnResult = $UNDETERMINED_CAPS_STRING }
    -2 { $outObject.returnResult = $FAILED_TO_RUN_STRING }
}


if (0 -eq $outObject.returncode) {
    "Windows 11 Ready"
}
else {
    "Not Windows 11 Ready"
}
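Since the script's console output is one of those two strings, a small wrapper can capture the result before pushing it to a custom field or alert. This is only a sketch; the file path is an assumption about where the script was saved:

# Hypothetical wrapper: capture the readiness string and branch on it.
$result = powershell.exe -ExecutionPolicy Bypass -File .\Win_Win11_Ready.ps1
if ($result -eq "Windows 11 Ready") {
    Write-Output "Agent can be scheduled for the Windows 11 upgrade."
}
else {
    Write-Output "Agent is not ready: $result"
}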
Some files were not shown because too many files have changed in this diff.