Compare commits

..

44 Commits

Author SHA1 Message Date
wh1te909 5605c72253 Release 0.14.6 2022-08-09 21:47:20 +00:00
wh1te909 66bbcf0733 fix tests 2022-08-09 21:35:23 +00:00
wh1te909 acc23ea7bb bump versions 2022-08-09 21:18:41 +00:00
wh1te909 663bd0c9f0 remove dead code 2022-08-09 21:16:18 +00:00
wh1te909 39b1025dfa fix tests 2022-08-05 17:35:40 +00:00
sadnub d2875e90b2 fix docker dev 2022-08-05 12:10:52 -04:00
wh1te909 ff461d1d02 fixes #1174 amidaware/tacticalrmm-web@76f330fb9c 2022-08-05 07:22:46 +00:00
wh1te909 58164ea2d3 dev 2022-08-05 05:57:38 +00:00
wh1te909 1bf4834004 Django 4.1 2022-08-04 23:43:57 +00:00
wh1te909 bf58d78281 fix return tuple formatting 2022-08-04 23:40:35 +00:00
wh1te909 0dc749bb3d Release 0.14.5 2022-08-01 22:57:01 +00:00
wh1te909 a8aedfde55 bump version 2022-08-01 22:56:21 +00:00
wh1te909 b174a89032 Release 0.14.4 2022-08-01 18:09:18 +00:00
wh1te909 9b92d1b673 bump version 2022-08-01 17:50:33 +00:00
wh1te909 febc9aed11 feat: run as user amidaware/tacticalrmm-web@137a5648ce amidaware/rmmagent@50cebb950d 2022-07-31 22:23:19 +00:00
wh1te909 de2462677e fix working dir 2022-07-31 21:31:58 +00:00
wh1te909 8bd94d46eb fix empty statement 2022-07-28 17:28:49 +00:00
wh1te909 d43cefe28f add file associations for yaml [skip ci] 2022-07-27 07:31:54 +00:00
wh1te909 b82874e261 back to develop 2022-07-27 07:30:53 +00:00
wh1te909 8554cb5d6c Release 0.14.3 2022-07-27 07:18:55 +00:00
wh1te909 f901614056 bump version 2022-07-27 06:11:41 +00:00
wh1te909 b555d217ab remove check 2022-07-27 06:10:33 +00:00
wh1te909 775c600234 docker nginx changes 2022-07-27 04:19:17 +00:00
wh1te909 128f2570b8 catch exception if mesh is down 2022-07-27 02:01:29 +00:00
wh1te909 3cd53e79b4 add signing key 2022-07-27 01:59:55 +00:00
wh1te909 ebba84ffda switch to official nginx repo to get latest version 2022-07-26 08:09:49 +00:00
wh1te909 1e1a42fe98 update web ver 2022-07-26 08:08:43 +00:00
wh1te909 8a744a440d update reqs 2022-07-26 07:47:21 +00:00
wh1te909 f4fc3c7d55 unused var 2022-07-26 04:39:11 +00:00
wh1te909 0594d121de add agent ver to status closes #1224 2022-07-24 01:18:54 +00:00
wh1te909 12c85d6234 start ansible role to deploy dev environment 2022-07-20 07:16:47 +00:00
wh1te909 5e37728f66 remove devskum 2022-07-18 17:22:24 +00:00
wh1te909 e8e19fede7 don't allow dates in past #1174 2022-07-18 08:04:15 +00:00
wh1te909 e565dbfa66 invalidate cache on policy script change 2022-07-12 20:16:07 +00:00
wh1te909 d180d6820c back to develop 2022-07-10 03:35:15 +00:00
wh1te909 7f252e9b7c Release 0.14.2 2022-07-10 03:34:02 +00:00
wh1te909 41db8681f8 no sudo 2022-07-10 00:38:09 +00:00
wh1te909 26cd58fd6d bump version 2022-07-10 00:16:32 +00:00
wh1te909 63c7e1aa9d update reqs 2022-07-10 00:16:13 +00:00
wh1te909 d5a6063e5e remove extra space 2022-07-09 09:31:05 +00:00
wh1te909 00affdbdec update supported version 2022-07-09 09:30:58 +00:00
wh1te909 db3f0bbd4f increase nginx open file limit 2022-07-09 08:09:24 +00:00
wh1te909 020a59cb97 remove un-needed expose 2022-07-09 08:08:09 +00:00
wh1te909 ff4fa6402d back to dev 2022-07-08 06:40:55 +00:00
45 changed files with 698 additions and 214 deletions

View File

@@ -22,22 +22,6 @@ services:
        aliases:
          - tactical-backend
-  app-dev:
-    container_name: trmm-app-dev
-    image: node:16-alpine
-    restart: always
-    command: /bin/sh -c "npm install --cache ~/.npm && npm run serve"
-    user: 1000:1000
-    working_dir: /workspace/web
-    volumes:
-      - ..:/workspace:cached
-    ports:
-      - "8080:${APP_PORT}"
-    networks:
-      dev:
-        aliases:
-          - tactical-frontend
  # nats
  nats-dev:
    container_name: trmm-nats-dev

View File

@@ -15,10 +15,7 @@ set -e
: "${MESH_PASS:=meshcentralpass}"
: "${MESH_HOST:=tactical-meshcentral}"
: "${API_HOST:=tactical-backend}"
-: "${APP_HOST:=tactical-frontend}"
: "${REDIS_HOST:=tactical-redis}"
-: "${HTTP_PROTOCOL:=http}"
-: "${APP_PORT:=8080}"
: "${API_PORT:=8000}"
: "${CERT_PRIV_PATH:=${TACTICAL_DIR}/certs/privkey.pem}"
@@ -142,16 +139,6 @@ if [ "$1" = 'tactical-init-dev' ]; then
  django_setup
-  # create .env file for frontend
-  webenv="$(cat << EOF
-PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
-DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
-DEV_PORT = ${APP_PORT}
-DOCKER_BUILD = 1
-EOF
-)"
-  echo "${webenv}" | tee "${WORKSPACE_DIR}"/web/.env > /dev/null
  # chown everything to tactical user
  chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
  chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"

View File

@@ -1,41 +1,3 @@
-# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
-asgiref==3.5.0
-celery==5.2.6
-channels==3.0.4
-channels_redis==3.4.0
-daphne==3.0.2
-Django==4.0.4
-django-cors-headers==3.11.0
-django-ipware==4.0.2
-django-rest-knox==4.2.0
-djangorestframework==3.13.1
-future==0.18.2
-msgpack==1.0.3
-nats-py==2.1.0
-packaging==21.3
-psycopg2-binary==2.9.3
-pycryptodome==3.14.1
-pyotp==2.6.0
-pytz==2022.1
-qrcode==7.3.1
-redis==4.2.2
-requests==2.27.1
-twilio==7.8.1
-urllib3==1.26.9
-validators==0.18.2
-websockets==10.2
-drf_spectacular==0.22.0
-meshctrl==0.1.15
-hiredis==2.0.0
-# dev
-black==22.3.0
-django-extensions==3.1.5
-isort==5.10.1
-mypy==0.942
-types-pytz==2021.3.6
-model-bakery==1.5.0
-coverage==6.3.2
-django-silk==4.3.0
-django-stubs==1.10.1
-djangorestframework-stubs==1.5.0
+-r /workspace/api/tacticalrmm/requirements.txt
+-r /workspace/api/tacticalrmm/requirements-dev.txt
+-r /workspace/api/tacticalrmm/requirements-test.txt

View File

@@ -14,18 +14,18 @@ jobs:
    name: Tests
    strategy:
      matrix:
-        python-version: ['3.10.4']
+        python-version: ["3.10.4"]
    steps:
      - uses: actions/checkout@v3
      - uses: harmon758/postgresql-action@v1
        with:
-          postgresql version: '14'
-          postgresql db: 'pipeline'
-          postgresql user: 'pipeline'
-          postgresql password: 'pipeline123456'
+          postgresql version: "14"
+          postgresql db: "pipeline"
+          postgresql user: "pipeline"
+          postgresql password: "pipeline123456"
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
@@ -49,13 +49,13 @@ jobs:
          pip install -r requirements.txt -r requirements-test.txt
      - name: Codestyle black
-        working-directory: api/tacticalrmm
+        working-directory: api
        run: |
          black --exclude migrations/ --check tacticalrmm
          if [ $? -ne 0 ]; then
            exit 1
          fi
      - name: Run django tests
        env:
          GHACTIONS: "yes"

View File

@@ -1,34 +0,0 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
name: DevSkim
on:
push:
branches: [ develop ]
pull_request:
branches: [ develop ]
schedule:
- cron: '19 5 * * 0'
jobs:
lint:
name: DevSkim
runs-on: ubuntu-20.04
permissions:
actions: read
contents: read
security-events: write
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Run DevSkim scanner
uses: microsoft/DevSkim-Action@v1
- name: Upload DevSkim scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v1
with:
sarif_file: devskim-results.sarif

View File

@@ -27,6 +27,10 @@
  "editor.bracketPairColorization.enabled": true,
  "editor.guides.bracketPairs": true,
  "editor.formatOnSave": true,
+  "files.associations": {
+    "**/ansible/**/*.yml": "ansible",
+    "**/docker/**/docker-compose*.yml": "dockercompose"
+  },
  "files.watcherExclude": {
    "files.watcherExclude": {
      "**/.git/objects/**": true,

View File

@@ -4,8 +4,8 @@
| Version | Supported |
| ------- | ------------------ |
-| 0.12.2 | :white_check_mark: |
-| < 0.12.2 | :x: |
+| 0.14.1 | :white_check_mark: |
+| < 0.14.1 | :x: |

## Reporting a Vulnerability

ansible/README.md Normal file
View File

@@ -0,0 +1,3 @@
### tacticalrmm ansible WIP
ansible role to setup a Debian 11 VM for tacticalrmm local development

View File

@@ -0,0 +1,37 @@
---
user: "tactical"
python_ver: "3.10.4"
backend_repo: "https://github.com/amidaware/tacticalrmm.git"
frontend_repo: "https://github.com/amidaware/tacticalrmm-web.git"
scripts_repo: "https://github.com/amidaware/community-scripts.git"
backend_dir: "/opt/trmm"
frontend_dir: "/opt/trmm-web"
scripts_dir: "/opt/community-scripts"
trmm_dir: "/opt/trmm/api/tacticalrmm/tacticalrmm"
settings_file: "{{ trmm_dir }}/settings.py"
local_settings_file: "{{ trmm_dir }}/local_settings.py"
base_pkgs:
- build-essential
- curl
- wget
- dirmngr
- gnupg
- openssl
- gcc
- g++
- make
- ca-certificates
- redis
- git
python_pkgs:
- zlib1g-dev
- libncurses5-dev
- libgdbm-dev
- libnss3-dev
- libssl-dev
- libreadline-dev
- libffi-dev
- libsqlite3-dev
- libbz2-dev

View File

@@ -0,0 +1,25 @@
worker_rlimit_nofile 1000000;
user www-data;
worker_processes auto;
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;
events {
worker_connections 2048;
}
http {
sendfile on;
tcp_nopush on;
types_hash_max_size 2048;
server_names_hash_bucket_size 64;
include /etc/nginx/mime.types;
default_type application/octet-stream;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers on;
access_log /var/log/nginx/access.log;
error_log /var/log/nginx/error.log;
gzip on;
include /etc/nginx/conf.d/*.conf;
include /etc/nginx/sites-enabled/*;
}

View File

@@ -0,0 +1,20 @@
" This file loads the default vim options at the beginning and prevents
" that they are being loaded again later. All other options that will be set,
" are added, or overwrite the default settings. Add as many options as you
" whish at the end of this file.
" Load the defaults
source $VIMRUNTIME/defaults.vim
" Prevent the defaults from being loaded again later, if the user doesn't
" have a local vimrc (~/.vimrc)
let skip_defaults_vim = 1
" Set more options (overwrites settings from /usr/share/vim/vim80/defaults.vim)
" Add as many options as you whish
" Set the mouse mode to 'r'
if has('mouse')
set mouse=r
endif

View File

@@ -0,0 +1,253 @@
---
- name: set mouse mode for vim
tags: vim
become: yes
ansible.builtin.copy:
src: vimrc.local
dest: /etc/vim/vimrc.local
owner: "root"
group: "root"
mode: "0644"
- name: install base packages
tags: base
become: yes
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
update_cache: yes
with_items:
- "{{ base_pkgs }}"
- name: install python prereqs
tags: python
become: yes
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
with_items:
- "{{ python_pkgs }}"
- name: get cpu core count
tags: python
ansible.builtin.command: nproc
register: numprocs
- name: Create python tmpdir
tags: python
ansible.builtin.tempfile:
state: directory
suffix: python
register: python_tmp
- name: download and extract python
tags: python
ansible.builtin.unarchive:
src: "https://www.python.org/ftp/python/{{ python_ver }}/Python-{{ python_ver }}.tgz"
dest: "{{ python_tmp.path }}"
remote_src: yes
- name: compile python
tags: python
ansible.builtin.shell:
chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}"
cmd: |
./configure --enable-optimizations
make -j {{ numprocs.stdout }}
- name: alt install python
tags: python
become: yes
ansible.builtin.shell:
chdir: "{{ python_tmp.path }}/Python-{{ python_ver }}"
cmd: |
make altinstall
- name: install nginx
tags: nginx
become: yes
ansible.builtin.apt:
pkg: nginx
state: present
- name: set nginx default conf
tags: nginx
become: yes
ansible.builtin.copy:
src: nginx-default.conf
dest: /etc/nginx/nginx.conf
owner: "root"
group: "root"
mode: "0644"
- name: ensure nginx enabled and restarted
tags: nginx
become: yes
ansible.builtin.service:
name: nginx
enabled: yes
state: restarted
- name: create postgres repo
tags: postgres
become: yes
ansible.builtin.copy:
content: "deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main"
dest: /etc/apt/sources.list.d/pgdg.list
owner: root
group: root
mode: "0440"
- name: import postgres repo signing key
tags: postgres
become: yes
ansible.builtin.apt_key:
url: https://www.postgresql.org/media/keys/ACCC4CF8.asc
state: present
- name: install postgresql
tags: postgres
become: yes
ansible.builtin.apt:
pkg: postgresql-14
state: present
update_cache: yes
- name: ensure postgres enabled and started
tags: postgres
become: yes
ansible.builtin.service:
name: postgresql
enabled: yes
state: started
- name: setup database
tags: postgres
become: yes
become_user: postgres
ansible.builtin.shell:
cmd: |
psql -c "CREATE DATABASE tacticalrmm"
psql -c "CREATE USER {{ db_user }} WITH PASSWORD '{{ db_passwd }}'"
psql -c "ALTER ROLE {{ db_user }} SET client_encoding TO 'utf8'"
psql -c "ALTER ROLE {{ db_user }} SET default_transaction_isolation TO 'read committed'"
psql -c "ALTER ROLE {{ db_user }} SET timezone TO 'UTC'"
psql -c "ALTER ROLE {{ db_user }} CREATEDB"
psql -c "GRANT ALL PRIVILEGES ON DATABASE tacticalrmm TO {{ db_user }}"
- name: create repo dirs
become: yes
tags: git
ansible.builtin.file:
path: "{{ item }}"
state: directory
owner: "{{ user }}"
group: "{{ user }}"
mode: "0755"
with_items:
- "{{ backend_dir }}"
- "{{ frontend_dir }}"
- "{{ scripts_dir }}"
- name: git clone repos
tags: git
ansible.builtin.git:
repo: "{{ item.repo }}"
dest: "{{ item.dest }}"
version: "{{ item.version }}"
with_items:
- {
repo: "{{ backend_repo }}",
dest: "{{ backend_dir }}",
version: develop,
}
- {
repo: "{{ frontend_repo }}",
dest: "{{ frontend_dir }}",
version: develop,
}
- { repo: "{{ scripts_repo }}", dest: "{{ scripts_dir }}", version: main }
- name: get nats_server_ver
tags: nats
ansible.builtin.shell: grep "^NATS_SERVER_VER" {{ settings_file }} | awk -F'[= "]' '{print $5}'
register: nats_server_ver
- name: Create nats tmpdir
tags: nats
ansible.builtin.tempfile:
state: directory
suffix: nats
register: nats_tmp
- name: download and extract nats
tags: nats
ansible.builtin.unarchive:
src: "https://github.com/nats-io/nats-server/releases/download/v{{ nats_server_ver.stdout }}/nats-server-v{{ nats_server_ver.stdout }}-linux-amd64.tar.gz"
dest: "{{ nats_tmp.path }}"
remote_src: yes
- name: install nats
tags: nats
become: yes
ansible.builtin.copy:
remote_src: yes
src: "{{ nats_tmp.path }}/nats-server-v{{ nats_server_ver.stdout }}-linux-amd64/nats-server"
dest: /usr/local/bin/nats-server
owner: "{{ user }}"
group: "{{ user }}"
mode: "0755"
- name: Create nodejs tmpdir
tags: nodejs
ansible.builtin.tempfile:
state: directory
suffix: nodejs
register: nodejs_tmp
- name: download nodejs setup
tags: nodejs
ansible.builtin.get_url:
url: https://deb.nodesource.com/setup_16.x
dest: "{{ nodejs_tmp.path }}/setup_node.sh"
mode: "0755"
- name: run node setup script
tags: nodejs
become: yes
ansible.builtin.command:
cmd: "{{ nodejs_tmp.path }}/setup_node.sh"
- name: install nodejs
tags: nodejs
become: yes
ansible.builtin.apt:
pkg: nodejs
state: present
update_cache: yes
- name: update npm
tags: nodejs
become: yes
ansible.builtin.shell:
cmd: npm install -g npm
- name: deploy django local settings
tags: django
ansible.builtin.template:
src: local_settings.j2
dest: "{{ local_settings_file }}"
mode: "0644"
owner: "{{ user }}"
group: "{{ user }}"
- name: remove tempdirs
tags: cleanup
become: yes
ansible.builtin.file:
path: "{{ item }}"
state: absent
with_items:
- "{{ nats_tmp.path }}"
- "{{ python_tmp.path }}"
- "{{ nodejs_tmp.path }}"

View File

@@ -0,0 +1,19 @@
SECRET_KEY = "{{ django_secret }}"
DEBUG = True
ALLOWED_HOSTS = ['{{ api }}']
ADMIN_URL = "admin/"
CORS_ORIGIN_WHITELIST = [
"https://{{ rmm }}"
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'tacticalrmm',
'USER': '{{ db_user }}',
'PASSWORD': '{{ db_passwd }}',
'HOST': 'localhost',
'PORT': '5432',
}
}
REDIS_HOST = "localhost"
ADMIN_ENABLED = True

View File

@@ -0,0 +1,14 @@
---
api: 'api.example.com'
rmm: 'rmm.example.com'
mesh: 'mesh.example.com'
github_username: 'changeme'
github_email: 'changeme@example.com'
mesh_site: 'changeme'
mesh_user: 'changeme'
mesh_token: 'changeme'
db_user: 'changeme'
db_passwd: 'changeme'
django_secret: 'changeme'

ansible/setup_dev.yml Normal file
View File

@@ -0,0 +1,6 @@
---
- hosts: "{{ target }}"
vars:
ansible_user: tactical
roles:
- trmm_dev

View File

@@ -93,7 +93,7 @@ class LoginView(KnoxLoginView):
            login(request, user)

            # save ip information
-            client_ip, is_routable = get_client_ip(request)
+            client_ip, _ = get_client_ip(request)
            user.last_login_ip = client_ip
            user.save()

View File

@@ -532,12 +532,17 @@ class Agent(BaseAuditModel):
        wait: bool = False,
        run_on_any: bool = False,
        history_pk: int = 0,
+        run_as_user: bool = False,
    ) -> Any:
        from scripts.models import Script

        script = Script.objects.get(pk=scriptpk)
+        # always override if set on script model
+        if script.run_as_user:
+            run_as_user = True
        parsed_args = script.parse_script_args(self, script.shell, args)

        data = {
@@ -548,6 +553,7 @@ class Agent(BaseAuditModel):
                "code": script.code,
                "shell": script.shell,
            },
+            "run_as_user": run_as_user,
        }

        if history_pk != 0:
@@ -839,22 +845,22 @@ class Agent(BaseAuditModel):
            asyncio.run(
                send_command_with_mesh(cmd, mesh_uri, self.mesh_node_id, shell, 0)
            )
-            return ("ok", False)
+            return "ok", False
        elif mode == "mesh":
            data = {"func": "recover", "payload": {"mode": mode}}
            if wait:
                r = asyncio.run(self.nats_cmd(data, timeout=20))
                if r == "ok":
-                    return ("ok", False)
+                    return "ok", False
                else:
-                    return (str(r), True)
+                    return str(r), True
            else:
                asyncio.run(self.nats_cmd(data, timeout=20, wait=False))
-                return ("ok", False)
+                return "ok", False

-        return ("invalid", True)
+        return "invalid", True

    @staticmethod
    def serialize(agent: "Agent") -> Dict[str, Any]:
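A minimal usage sketch of the new flag (illustrative only, not part of this changeset; the agent and script objects are assumed to already exist):

# run_as_user can be requested per call, but is forced on whenever the script model enables it
r = agent.run_script(
    scriptpk=script.pk,
    args=["hello"],
    timeout=30,
    wait=True,
    run_as_user=True,  # forwarded to the agent inside the nats payload as "run_as_user"
)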

View File

@@ -145,12 +145,6 @@ class AgentTableSerializer(serializers.ModelSerializer):
        depth = 2

-class WinAgentSerializer(serializers.ModelSerializer):
-    class Meta:
-        model = Agent
-        fields = "__all__"

class AgentHostnameSerializer(serializers.ModelSerializer):
    client = serializers.ReadOnlyField(source="client.name")
    site = serializers.ReadOnlyField(source="site.name")

View File

@@ -153,6 +153,7 @@ def run_script_email_results_task(
    emails: list[str],
    args: list[str] = [],
    history_pk: int = 0,
+    run_as_user: bool = False,
):
    agent = Agent.objects.get(pk=agentpk)
    script = Script.objects.get(pk=scriptpk)
@@ -163,6 +164,7 @@ def run_script_email_results_task(
        timeout=nats_timeout,
        wait=True,
        history_pk=history_pk,
+        run_as_user=run_as_user,
    )
    if r == "timeout":
        DebugLog.error(

View File

@@ -84,7 +84,7 @@ class TestAgentUpdate(TacticalTestCase):
            site=self.site1,
            monitoring_type=AgentMonType.SERVER,
            plat=AgentPlat.WINDOWS,
-            version="2.3.0",
+            version="2.1.1",
        )
        r = agent_noarch.do_update(token="", force=True)
        self.assertEqual(r, "noarch")
@@ -106,7 +106,7 @@ class TestAgentUpdate(TacticalTestCase):
            site=self.site1,
            monitoring_type=AgentMonType.SERVER,
            plat=AgentPlat.WINDOWS,
-            version="2.3.0",
+            version="2.1.1",
            goarch=GoArch.AMD64,
        )
@@ -115,7 +115,7 @@
            site=self.site3,
            monitoring_type=AgentMonType.WORKSTATION,
            plat=AgentPlat.LINUX,
-            version="2.3.0",
+            version="2.1.1",
            goarch=GoArch.ARM32,
        )
@@ -193,7 +193,7 @@
            site=self.site2,
            monitoring_type=AgentMonType.SERVER,
            plat=AgentPlat.WINDOWS,
-            version="2.3.0",
+            version="2.1.1",
            goarch=GoArch.AMD64,
            _quantity=6,
        )
@@ -215,7 +215,7 @@
            site=self.site2,
            monitoring_type=AgentMonType.SERVER,
            plat=AgentPlat.WINDOWS,
-            version="2.3.0",
+            version="2.1.1",
            goarch=GoArch.AMD64,
            _quantity=7,
        )

View File

@@ -403,6 +403,7 @@ class TestAgentViews(TacticalTestCase):
            "cmd": "ipconfig",
            "shell": "cmd",
            "timeout": 30,
+            "run_as_user": False,
        }
        mock_ret.return_value = "nt authority\\system"
        r = self.client.post(url, data, format="json")
@@ -417,16 +418,20 @@ class TestAgentViews(TacticalTestCase):
    @patch("agents.models.Agent.nats_cmd")
    def test_reboot_later(self, nats_cmd):
-        nats_cmd.return_value = "ok"
        url = f"{base_url}/{self.agent.agent_id}/reboot/"

-        data = {
-            "datetime": "2025-08-29T18:41:02",
-        }
+        # ensure we don't allow dates in past
+        data = {"datetime": "2022-07-11T01:51"}
+        r = self.client.patch(url, data, format="json")
+        self.assertEqual(r.status_code, 400)
+        self.assertEqual(r.data, "Date cannot be set in the past")

+        nats_cmd.return_value = "ok"
+        # test with date in future
+        data["datetime"] = "2027-08-29T18:41"
        r = self.client.patch(url, data, format="json")
        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
+        self.assertEqual(r.data["time"], "August 29, 2027 at 06:41 PM")
        self.assertEqual(r.data["agent"], self.agent.hostname)

        nats_data = {
@@ -439,12 +444,12 @@
            "multiple_instances": 2,
            "trigger": "runonce",
            "name": r.data["task_name"],
-            "start_year": 2025,
+            "start_year": 2027,
            "start_month": 8,
            "start_day": 29,
            "start_hour": 18,
            "start_min": 41,
-            "expire_year": 2025,
+            "expire_year": 2027,
            "expire_month": 8,
            "expire_day": 29,
            "expire_hour": 18,
@@ -534,6 +539,7 @@
            "output": "wait",
            "args": [],
            "timeout": 15,
+            "run_as_user": False,
        }

        r = self.client.post(url, data, format="json")
@@ -543,7 +549,12 @@
            raise AgentHistory.DoesNotExist

        run_script.assert_called_with(
-            scriptpk=script.pk, args=[], timeout=18, wait=True, history_pk=hist.pk
+            scriptpk=script.pk,
+            args=[],
+            timeout=18,
+            wait=True,
+            history_pk=hist.pk,
+            run_as_user=False,
        )
        run_script.reset_mock()
@@ -555,6 +566,7 @@
            "timeout": 15,
            "emailMode": "default",
            "emails": ["admin@example.com", "bob@example.com"],
+            "run_as_user": False,
        }
        r = self.client.post(url, data, format="json")
        self.assertEqual(r.status_code, 200)
@@ -564,6 +576,7 @@
            nats_timeout=18,
            emails=[],
            args=["abc", "123"],
+            run_as_user=False,
        )
        email_task.reset_mock()
@@ -577,6 +590,7 @@
            nats_timeout=18,
            emails=["admin@example.com", "bob@example.com"],
            args=["abc", "123"],
+            run_as_user=False,
        )

        # test fire and forget
@@ -585,6 +599,7 @@
            "output": "forget",
            "args": ["hello", "world"],
            "timeout": 22,
+            "run_as_user": True,
        }

        r = self.client.post(url, data, format="json")
@@ -594,7 +609,11 @@
            raise AgentHistory.DoesNotExist

        run_script.assert_called_with(
-            scriptpk=script.pk, args=["hello", "world"], timeout=25, history_pk=hist.pk
+            scriptpk=script.pk,
+            args=["hello", "world"],
+            timeout=25,
+            history_pk=hist.pk,
+            run_as_user=True,
        )
        run_script.reset_mock()
@@ -609,6 +628,7 @@
            "timeout": 22,
            "custom_field": custom_field.pk,
            "save_all_output": True,
+            "run_as_user": False,
        }

        r = self.client.post(url, data, format="json")
@@ -623,6 +643,7 @@
            timeout=25,
            wait=True,
            history_pk=hist.pk,
+            run_as_user=False,
        )
        run_script.reset_mock()
@@ -640,6 +661,7 @@
            "timeout": 22,
            "custom_field": custom_field.pk,
            "save_all_output": False,
+            "run_as_user": False,
        }

        r = self.client.post(url, data, format="json")
@@ -654,6 +676,7 @@
            timeout=25,
            wait=True,
            history_pk=hist.pk,
+            run_as_user=False,
        )
        run_script.reset_mock()
@@ -673,6 +696,7 @@
            "timeout": 22,
            "custom_field": custom_field.pk,
            "save_all_output": False,
+            "run_as_user": False,
        }

        r = self.client.post(url, data, format="json")
@@ -687,6 +711,7 @@
            timeout=25,
            wait=True,
            history_pk=hist.pk,
+            run_as_user=False,
        )
        run_script.reset_mock()
@@ -703,6 +728,7 @@
            "output": "note",
            "args": ["hello", "world"],
            "timeout": 22,
+            "run_as_user": False,
        }

        r = self.client.post(url, data, format="json")
@@ -717,6 +743,7 @@
            timeout=25,
            wait=True,
            history_pk=hist.pk,
+            run_as_user=False,
        )
        run_script.reset_mock()

View File

@@ -35,11 +35,12 @@ from tacticalrmm.constants import (
    AgentMonType,
    AgentPlat,
    CustomFieldModel,
+    DebugLogType,
    EvtLogNames,
    PAAction,
    PAStatus,
)
-from tacticalrmm.helpers import notify_error
+from tacticalrmm.helpers import date_is_in_past, notify_error
from tacticalrmm.permissions import (
    _has_perm_on_agent,
    _has_perm_on_client,
@@ -225,8 +226,14 @@ class GetUpdateDeleteAgent(APIView):
        mesh_id = agent.mesh_node_id
        agent.delete()
        reload_nats()
-        uri = get_mesh_ws_url()
-        asyncio.run(remove_mesh_agent(uri, mesh_id))
+        try:
+            uri = get_mesh_ws_url()
+            asyncio.run(remove_mesh_agent(uri, mesh_id))
+        except Exception as e:
+            DebugLog.error(
+                message=f"Unable to remove agent {name} from meshcentral database: {str(e)}",
+                log_type=DebugLogType.AGENT_ISSUES,
+            )

        return Response(f"{name} will now be uninstalled.")
@@ -408,6 +415,7 @@ def send_raw_cmd(request, agent_id):
            "command": request.data["cmd"],
            "shell": shell,
        },
+        "run_as_user": request.data["run_as_user"],
    }

    hist = AgentHistory.objects.create(
@@ -452,10 +460,13 @@ class Reboot(APIView):
            return notify_error(f"Not currently implemented for {agent.plat}")

        try:
-            obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%dT%H:%M:%S")
+            obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%dT%H:%M")
        except Exception:
            return notify_error("Invalid date")

+        if date_is_in_past(datetime_obj=obj, agent_tz=agent.timezone):
+            return notify_error("Date cannot be set in the past")
+
        task_name = "TacticalRMM_SchedReboot_" + "".join(
            random.choice(string.ascii_letters) for _ in range(10)
        )
@@ -681,6 +692,7 @@ def run_script(request, agent_id):
    script = get_object_or_404(Script, pk=request.data["script"])
    output = request.data["output"]
    args = request.data["args"]
+    run_as_user: bool = request.data["run_as_user"]
    req_timeout = int(request.data["timeout"]) + 3

    AuditLog.audit_script_run(
@@ -705,6 +717,7 @@ def run_script(request, agent_id):
            timeout=req_timeout,
            wait=True,
            history_pk=history_pk,
+            run_as_user=run_as_user,
        )
        return Response(r)
@@ -718,6 +731,7 @@ def run_script(request, agent_id):
            nats_timeout=req_timeout,
            emails=emails,
            args=args,
+            run_as_user=run_as_user,
        )
    elif output == "collector":
        from core.models import CustomField
@@ -728,6 +742,7 @@ def run_script(request, agent_id):
            timeout=req_timeout,
            wait=True,
            history_pk=history_pk,
+            run_as_user=run_as_user,
        )

        custom_field = CustomField.objects.get(pk=request.data["custom_field"])
@@ -756,13 +771,18 @@ def run_script(request, agent_id):
            timeout=req_timeout,
            wait=True,
            history_pk=history_pk,
+            run_as_user=run_as_user,
        )

        Note.objects.create(agent=agent, user=request.user, note=r)
        return Response(r)

    else:
        agent.run_script(
-            scriptpk=script.pk, args=args, timeout=req_timeout, history_pk=history_pk
+            scriptpk=script.pk,
+            args=args,
+            timeout=req_timeout,
+            history_pk=history_pk,
+            run_as_user=run_as_user,
        )

    return Response(f"{script.name} will now be run on {agent.hostname}")
@@ -897,7 +917,7 @@ def bulk(request):
            shell,
            request.data["timeout"],
            request.user.username[:50],
-            run_on_offline=request.data["offlineAgents"],
+            request.data["run_as_user"],
        )

        return Response(f"Command will now be run on {len(agents)} agents")
@@ -909,6 +929,7 @@ def bulk(request):
            request.data["args"],
            request.data["timeout"],
            request.user.username[:50],
+            request.data["run_as_user"],
        )

        return Response(f"{script.name} will now be run on {len(agents)} agents")

View File

@@ -469,6 +469,7 @@ class Alert(models.Model):
                wait=True,
                full=True,
                run_on_any=True,
+                run_as_user=False,
            )

            # command was successful
@@ -591,6 +592,7 @@ class Alert(models.Model):
                wait=True,
                full=True,
                run_on_any=True,
+                run_as_user=False,
            )

            # command was successful

View File

@@ -1424,6 +1424,7 @@ class TestAlertTasks(TacticalTestCase):
            "timeout": 30,
            "script_args": [],
            "payload": {"code": failure_action.code, "shell": failure_action.shell},
+            "run_as_user": False,
        }

        nats_cmd.assert_called_with(data, timeout=30, wait=True)
@@ -1452,6 +1453,7 @@
            "timeout": 35,
            "script_args": ["nice_arg"],
            "payload": {"code": resolved_action.code, "shell": resolved_action.shell},
+            "run_as_user": False,
        }

        nats_cmd.assert_called_with(data, timeout=35, wait=True)

View File

@@ -241,6 +241,7 @@ class TaskGOGetSerializer(serializers.ModelSerializer):
                    ),
                    "shell": script.shell,
                    "timeout": action["timeout"],
+                    "run_as_user": script.run_as_user,
                }
            )

        if actions_to_remove:

View File

@@ -182,10 +182,10 @@ class CoreSettings(BaseAuditModel):
        test: bool = False,
    ) -> tuple[str, bool]:

        if test and not self.email_is_configured:
-            return ("There needs to be at least one email recipient configured", False)
+            return "There needs to be at least one email recipient configured", False

        # return since email must be configured to continue
        elif not self.email_is_configured:
-            return ("SMTP messaging not configured.", False)
+            return "SMTP messaging not configured.", False

        # override email from if alert_template is passed and is set
        if alert_template and alert_template.email_from:
@@ -199,7 +199,7 @@ class CoreSettings(BaseAuditModel):
        elif self.email_alert_recipients:
            email_recipients = ", ".join(cast(List[str], self.email_alert_recipients))
        else:
-            return ("There needs to be at least one email recipient configured", False)
+            return "There needs to be at least one email recipient configured", False

        try:
            msg = EmailMessage()
@@ -226,12 +226,12 @@ class CoreSettings(BaseAuditModel):
        except Exception as e:
            DebugLog.error(message=f"Sending email failed with error: {e}")
            if test:
-                return (str(e), False)
+                return str(e), False

        if test:
-            return ("Email test ok!", True)
+            return "Email test ok!", True

-        return ("ok", True)
+        return "ok", True

    def send_sms(
        self,
@@ -240,7 +240,7 @@ class CoreSettings(BaseAuditModel):
        test: bool = False,
    ) -> tuple[str, bool]:
        if not self.sms_is_configured:
-            return ("Sms alerting is not setup correctly.", False)
+            return "Sms alerting is not setup correctly.", False

        # override email recipients if alert_template is passed and is set
        if alert_template and alert_template.text_recipients:
@@ -248,7 +248,7 @@ class CoreSettings(BaseAuditModel):
        elif self.sms_alert_recipients:
            text_recipients = cast(List[str], self.sms_alert_recipients)
        else:
-            return ("No sms recipients found", False)
+            return "No sms recipients found", False

        tw_client = TwClient(self.twilio_account_sid, self.twilio_auth_token)
        for num in text_recipients:
@@ -257,12 +257,12 @@ class CoreSettings(BaseAuditModel):
            except TwilioRestException as e:
                DebugLog.error(message=f"SMS failed to send: {e}")
                if test:
-                    return (str(e), False)
+                    return str(e), False

        if test:
-            return ("SMS Test sent successfully!", True)
+            return "SMS Test sent successfully!", True

-        return ("ok", True)
+        return "ok", True

    @staticmethod
    def serialize(core):

View File

@@ -174,7 +174,7 @@ def _get_failing_data(agents: "QuerySet[Any]") -> Dict[str, bool]:
                and task.task_result.status == TaskStatus.FAILING
                and task.alert_severity == AlertSeverity.WARNING
            ):
-                data["warning"]
+                data["warning"] = True

    return data

View File

@@ -424,6 +424,7 @@ def status(request):
    ret = {
        "version": settings.TRMM_VERSION,
+        "latest_agent_version": settings.LATEST_AGENT_VER,
        "agent_count": Agent.objects.count(),
        "client_count": Client.objects.count(),
        "site_count": Site.objects.count(),

View File

@@ -1,11 +1,10 @@
import datetime as dt
-import pytz

from django.db.models.signals import post_init
from django.dispatch import receiver
-from django.utils import timezone as djangotime

from tacticalrmm.constants import PAAction, PAStatus
+from tacticalrmm.helpers import date_is_in_past

from .models import PendingAction
@@ -22,14 +21,8 @@ def handle_status(sender, instance: PendingAction, **kwargs):
            reboot_time = dt.datetime.strptime(
                instance.details["time"], "%Y-%m-%d %H:%M:%S"
            )
-            # need to convert agent tz to UTC in order to compare
-            agent_tz = pytz.timezone(instance.agent.timezone)
-            localized = agent_tz.localize(reboot_time)
-            now = djangotime.now()
-            reboot_time_utc = localized.astimezone(pytz.utc)
-            if now > reboot_time_utc:
+            if date_is_in_past(
+                datetime_obj=reboot_time, agent_tz=instance.agent.timezone
+            ):
                instance.status = PAStatus.COMPLETED
                instance.save(update_fields=["status"])

View File

@@ -3,18 +3,18 @@ celery==5.2.7
certifi==2022.6.15
cffi==1.15.1
channels==3.0.5
-channels_redis==3.4.0
+channels_redis==3.4.1
chardet==4.0.0
cryptography==37.0.4
daphne==3.0.2
-Django==4.0.6
+Django==4.1
django-cors-headers==3.13.0
django-ipware==4.0.2
django-rest-knox==4.2.0
djangorestframework==3.13.1
future==0.18.2
msgpack==1.0.4
-nats-py==2.1.3
+nats-py==2.1.4
psutil==5.9.1
psycopg2-binary==2.9.3
pycparser==2.21
@@ -28,12 +28,12 @@ hiredis==2.0.0
requests==2.28.1
six==1.16.0
sqlparse==0.4.2
-twilio==7.10.0
+twilio==7.12.0
-urllib3==1.26.9
+urllib3==1.26.11
uWSGI==2.0.20
validators==0.20.0
vine==5.0.0
websockets==10.3
-zipp==3.8.0
+zipp==3.8.1
drf_spectacular==0.22.1
meshctrl==0.1.15

View File

@@ -0,0 +1,18 @@
# Generated by Django 4.0.6 on 2022-07-30 21:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scripts', '0017_auto_20220311_0100'),
]
operations = [
migrations.AddField(
model_name='script',
name='run_as_user',
field=models.BooleanField(default=False),
),
]

View File

@@ -40,6 +40,7 @@ class Script(BaseAuditModel):
    supported_platforms = ArrayField(
        models.CharField(max_length=20), null=True, blank=True, default=list
    )
+    run_as_user = models.BooleanField(default=False)

    def __str__(self):
        return self.name

View File

@@ -20,6 +20,7 @@ class ScriptTableSerializer(ModelSerializer):
            "filename",
            "hidden",
            "supported_platforms",
+            "run_as_user",
        ]
@@ -43,16 +44,17 @@ class ScriptSerializer(ModelSerializer):
            "filename",
            "hidden",
            "supported_platforms",
+            "run_as_user",
        ]


class ScriptCheckSerializer(ModelSerializer):
    code = ReadOnlyField()
-    script_hash = ReadOnlyField
+    script_hash = ReadOnlyField()

    class Meta:
        model = Script
-        fields = ["code", "shell", "script_hash"]
+        fields = ["code", "shell", "run_as_user", "script_hash"]


class ScriptSnippetSerializer(ModelSerializer):

View File

@@ -9,7 +9,12 @@ from tacticalrmm.constants import AgentHistoryType

@app.task
def handle_bulk_command_task(
-    agentpks, cmd, shell, timeout, username, run_on_offline=False
+    agentpks: list[int],
+    cmd: str,
+    shell: str,
+    timeout,
+    username,
+    run_as_user: bool = False,
) -> None:
    nats_data = {
        "func": "rawcmd",
@@ -18,7 +23,9 @@ def handle_bulk_command_task(
            "command": cmd,
            "shell": shell,
        },
+        "run_as_user": run_as_user,
    }

+    agent: "Agent"
    for agent in Agent.objects.filter(pk__in=agentpks):
        hist = AgentHistory.objects.create(
            agent=agent,
@@ -33,9 +40,15 @@

@app.task
def handle_bulk_script_task(
-    scriptpk: int, agentpks: List[int], args: List[str], timeout: int, username: str
+    scriptpk: int,
+    agentpks: List[int],
+    args: List[str],
+    timeout: int,
+    username: str,
+    run_as_user: bool = False,
) -> None:
    script = Script.objects.get(pk=scriptpk)
+    agent: "Agent"
    for agent in Agent.objects.filter(pk__in=agentpks):
        hist = AgentHistory.objects.create(
            agent=agent,
@@ -44,5 +57,9 @@ def handle_bulk_script_task(
            username=username,
        )
        agent.run_script(
-            scriptpk=script.pk, args=args, timeout=timeout, history_pk=hist.pk
+            scriptpk=script.pk,
+            args=args,
+            timeout=timeout,
+            history_pk=hist.pk,
+            run_as_user=run_as_user,
        )
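A hedged sketch of how these Celery tasks might be queued from a view (illustrative only; .delay is the standard Celery call, and the argument values are made up):

# matches the handle_bulk_script_task signature above
handle_bulk_script_task.delay(
    scriptpk=script.pk,
    agentpks=[agent.pk for agent in agents],
    args=["arg1"],
    timeout=30,
    username=request.user.username[:50],
    run_as_user=False,
)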

View File

@@ -145,6 +145,7 @@ class TestScriptViews(TacticalTestCase):
            "timeout": 90,
            "args": [],
            "shell": ScriptShell.POWERSHELL,
+            "run_as_user": False,
        }

        resp = self.client.post(url, data, format="json")

View File

@@ -17,6 +17,7 @@ from .serializers import (
    ScriptSnippetSerializer,
    ScriptTableSerializer,
)
+from core.utils import clear_entire_cache


class GetAddScripts(APIView):
@@ -58,7 +59,7 @@ class GetUpdateDeleteScript(APIView):
        return Response(ScriptSerializer(script).data)

    def put(self, request, pk):
-        script = get_object_or_404(Script, pk=pk)
+        script = get_object_or_404(Script.objects.prefetch_related("script"), pk=pk)

        data = request.data
@@ -76,7 +77,12 @@ class GetUpdateDeleteScript(APIView):
        serializer.is_valid(raise_exception=True)
        obj = serializer.save()

-        # obj.hash_script_body()
+        # TODO rename the related field from 'script' to 'scriptchecks' so it's not so confusing
+        if script.script.exists():
+            for script_check in script.script.all():
+                if script_check.policy:
+                    clear_entire_cache()
+                    break

        return Response(f"{obj.name} was edited!")
@@ -154,6 +160,7 @@ class TestScript(APIView):
                "code": Script.replace_with_snippets(request.data["code"]),
                "shell": request.data["shell"],
            },
+            "run_as_user": request.data["run_as_user"],
        }

        r = asyncio.run(

View File

@@ -26,7 +26,7 @@ def process_nats_response(data: Union[str, Dict]) -> Tuple[bool, bool, str]:
        else "timeout"
    )

-    return (success, natserror, errormsg)
+    return success, natserror, errormsg


class GetServices(APIView):

View File

@@ -61,4 +61,4 @@ class APIAuthentication(BaseAuthentication):
        if apikey.expiration and apikey.expiration < djangotime.now():
            raise exceptions.AuthenticationFailed(_("The token as expired."))

-        return (apikey.user, apikey.key)
+        return apikey.user, apikey.key

View File

@@ -1,7 +1,14 @@
+from typing import TYPE_CHECKING
+
+import pytz
from django.conf import settings
+from django.utils import timezone as djangotime
from rest_framework import status
from rest_framework.response import Response

+if TYPE_CHECKING:
+    from datetime import datetime
+

def get_certs() -> tuple[str, str]:
    domain = settings.ALLOWED_HOSTS[0].split(".", 1)[1]
@@ -12,7 +19,7 @@ def get_certs() -> tuple[str, str]:
    cert_file = settings.CERT_FILE
    key_file = settings.KEY_FILE

-    return (cert_file, key_file)
+    return cert_file, key_file


def notify_error(msg: str) -> Response:
@@ -26,4 +33,16 @@ def get_nats_ports() -> tuple[int, int]:
    nats_standard_port = getattr(settings, "NATS_STANDARD_PORT", 4222)
    nats_websocket_port = getattr(settings, "NATS_WEBSOCKET_PORT", 9235)

-    return (nats_standard_port, nats_websocket_port)
+    return nats_standard_port, nats_websocket_port
+
+
+def date_is_in_past(*, datetime_obj: "datetime", agent_tz: str) -> bool:
+    """
+    datetime_obj must be a naive datetime
+    """
+    now = djangotime.now()
+    # convert agent tz to UTC to compare
+    agent_pytz = pytz.timezone(agent_tz)
+    localized = agent_pytz.localize(datetime_obj)
+    utc_time = localized.astimezone(pytz.utc)
+    return now > utc_time
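A short usage sketch of the new helper (illustrative only; the timezone string and datetime are example values, mirroring how the Reboot view calls it):

import datetime as dt
from tacticalrmm.helpers import date_is_in_past

# naive datetime parsed with the new "%Y-%m-%dT%H:%M" format
obj = dt.datetime.strptime("2022-07-11T01:51", "%Y-%m-%dT%H:%M")
if date_is_in_past(datetime_obj=obj, agent_tz="America/Los_Angeles"):
    print("Date cannot be set in the past")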

View File

@@ -17,19 +17,19 @@ LINUX_AGENT_SCRIPT = BASE_DIR / "core" / "agent_linux.sh"
AUTH_USER_MODEL = "accounts.User"

# latest release
-TRMM_VERSION = "0.14.1"
+TRMM_VERSION = "0.14.6"

# https://github.com/amidaware/tacticalrmm-web
-WEB_VERSION = "0.100.4"
+WEB_VERSION = "0.100.8"

# bump this version everytime vue code is changed
# to alert user they need to manually refresh their browser
-APP_VER = "0.0.165"
+APP_VER = "0.0.169"

# https://github.com/amidaware/rmmagent
-LATEST_AGENT_VER = "2.1.1"
+LATEST_AGENT_VER = "2.3.0"

-MESH_VER = "1.0.43"
+MESH_VER = "1.0.60"

NATS_SERVER_VER = "2.8.4"

View File

@@ -2,7 +2,7 @@
set -e

-: "${WORKER_CONNECTIONS:=2048}"
+: "${WORKER_CONNECTIONS:=4096}"
: "${APP_PORT:=8080}"
: "${API_PORT:=8080}"
: "${NGINX_RESOLVER:=127.0.0.11}"
@@ -31,9 +31,12 @@ else
  fi
fi

+nginxdefaultconf='/etc/nginx/nginx.conf'
# increase default nginx worker connections
-/bin/bash -c "sed -i 's/worker_connections.*/worker_connections ${WORKER_CONNECTIONS};/g' /etc/nginx/nginx.conf"
+/bin/bash -c "sed -i 's/worker_connections.*/worker_connections ${WORKER_CONNECTIONS};/g' $nginxdefaultconf"
+sed -i '1s/^/worker_rlimit_nofile 1000000;\
+/' $nginxdefaultconf

if [[ $DEV -eq 1 ]]; then
  API_NGINX="
@@ -113,7 +116,7 @@ server {
    client_max_body_size 300M;

-    listen 4443 ssl;
+    listen 4443 ssl reuseport;
    ssl_certificate ${CERT_PUB_PATH};
    ssl_certificate_key ${CERT_PRIV_PATH};

View File

@@ -86,7 +86,6 @@ services:
      API_HOST: ${API_HOST}
    ports:
      - "4222:4222"
-      - "9235:9235"
    volumes:
      - tactical_data:/opt/tactical
    networks:

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-SCRIPT_VERSION="64"
+SCRIPT_VERSION="66"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/install.sh'

sudo apt install -y curl wget dirmngr gnupg lsb-release
@@ -172,10 +172,55 @@ sudo chmod 775 -R /etc/letsencrypt
print_green 'Installing Nginx'

+wget -qO - https://nginx.org/packages/keys/nginx_signing.key | sudo apt-key add -
+
+nginxrepo="$(cat << EOF
+deb https://nginx.org/packages/$osname/ $codename nginx
+deb-src https://nginx.org/packages/$osname/ $codename nginx
+EOF
+)"
+echo "${nginxrepo}" | sudo tee /etc/apt/sources.list.d/nginx.list > /dev/null
+
+sudo apt update
sudo apt install -y nginx
sudo systemctl stop nginx
-sudo sed -i 's/worker_connections.*/worker_connections 2048;/g' /etc/nginx/nginx.conf
-sudo sed -i 's/# server_names_hash_bucket_size.*/server_names_hash_bucket_size 64;/g' /etc/nginx/nginx.conf
+
+nginxdefaultconf='/etc/nginx/nginx.conf'
+
+nginxconf="$(cat << EOF
+worker_rlimit_nofile 1000000;
+user www-data;
+worker_processes auto;
+pid /run/nginx.pid;
+include /etc/nginx/modules-enabled/*.conf;
+events {
+    worker_connections 4096;
+}
+http {
+    sendfile on;
+    tcp_nopush on;
+    types_hash_max_size 2048;
+    server_names_hash_bucket_size 64;
+    include /etc/nginx/mime.types;
+    default_type application/octet-stream;
+    ssl_protocols TLSv1.2 TLSv1.3;
+    ssl_prefer_server_ciphers on;
+    access_log /var/log/nginx/access.log;
+    error_log /var/log/nginx/error.log;
+    gzip on;
+    include /etc/nginx/conf.d/*.conf;
+    include /etc/nginx/sites-enabled/*;
+}
+EOF
+)"
+echo "${nginxconf}" | sudo tee $nginxdefaultconf > /dev/null
+
+for i in sites-available sites-enabled
+do
+    sudo mkdir -p /etc/nginx/$i
+done

print_green 'Installing NodeJS'
@@ -515,7 +560,7 @@ server {
}

server {
-    listen 443 ssl;
+    listen 443 ssl reuseport;
    listen [::]:443 ssl;
    server_name ${rmmdomain};
    client_max_body_size 300M;

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-SCRIPT_VERSION="38"
+SCRIPT_VERSION="40"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/restore.sh'

sudo apt update
@@ -121,12 +121,22 @@ sudo npm install -g npm
print_green 'Restoring Nginx'

+wget -qO - https://nginx.org/packages/keys/nginx_signing.key | sudo apt-key add -
+
+nginxrepo="$(cat << EOF
+deb https://nginx.org/packages/$osname/ $codename nginx
+deb-src https://nginx.org/packages/$osname/ $codename nginx
+EOF
+)"
+echo "${nginxrepo}" | sudo tee /etc/apt/sources.list.d/nginx.list > /dev/null
+
+sudo apt update
sudo apt install -y nginx
sudo systemctl stop nginx
sudo rm -rf /etc/nginx
sudo mkdir /etc/nginx
sudo tar -xzf $tmp_dir/nginx/etc-nginx.tar.gz -C /etc/nginx
-sudo sed -i 's/worker_connections.*/worker_connections 2048;/g' /etc/nginx/nginx.conf

rmmdomain=$(grep server_name /etc/nginx/sites-available/rmm.conf | grep -v 301 | head -1 | tr -d " \t" | sed 's/.*server_name//' | tr -d ';')
frontenddomain=$(grep server_name /etc/nginx/sites-available/frontend.conf | grep -v 301 | head -1 | tr -d " \t" | sed 's/.*server_name//' | tr -d ';')
meshdomain=$(grep server_name /etc/nginx/sites-available/meshcentral.conf | grep -v 301 | head -1 | tr -d " \t" | sed 's/.*server_name//' | tr -d ';')

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash

-SCRIPT_VERSION="136"
+SCRIPT_VERSION="138"
SCRIPT_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/update.sh'
LATEST_SETTINGS_URL='https://raw.githubusercontent.com/amidaware/tacticalrmm/master/api/tacticalrmm/tacticalrmm/settings.py'
YELLOW='\033[1;33m'
@@ -120,13 +120,6 @@ if ! [[ $CHECK_NATS_WEBSOCKET ]]; then
' $rmmconf)" | sudo tee $rmmconf > /dev/null
fi

-if ! sudo nginx -t > /dev/null 2>&1; then
-  sudo nginx -t
-  echo -ne "\n"
-  echo -ne "${RED}You have syntax errors in your nginx configs. See errors above. Please fix them and re-run this script.${NC}\n"
-  echo -ne "${RED}Aborting...${NC}\n"
-  exit 1
-fi

for i in nginx nats-api nats rmm daphne celery celerybeat
do
@@ -165,13 +158,53 @@ EOF
)"
echo "${uwsgini}" > /rmm/api/tacticalrmm/app.ini

-CHECK_NGINX_WORKER_CONN=$(grep "worker_connections 2048" /etc/nginx/nginx.conf)
-if ! [[ $CHECK_NGINX_WORKER_CONN ]]; then
-  printf >&2 "${GREEN}Changing nginx worker connections to 2048${NC}\n"
-  sudo sed -i 's/worker_connections.*/worker_connections 2048;/g' /etc/nginx/nginx.conf
+if [ ! -f /etc/apt/sources.list.d/nginx.list ]; then
+  osname=$(lsb_release -si); osname=${osname^}
+  osname=$(echo "$osname" | tr '[A-Z]' '[a-z]')
+  codename=$(lsb_release -sc)
+  nginxrepo="$(cat << EOF
+deb https://nginx.org/packages/$osname/ $codename nginx
+deb-src https://nginx.org/packages/$osname/ $codename nginx
+EOF
+)"
+  echo "${nginxrepo}" | sudo tee /etc/apt/sources.list.d/nginx.list > /dev/null
+  wget -qO - https://nginx.org/packages/keys/nginx_signing.key | sudo apt-key add -
+  sudo apt update
+  sudo apt install -y nginx
fi

-sudo sed -i 's/# server_names_hash_bucket_size.*/server_names_hash_bucket_size 64;/g' /etc/nginx/nginx.conf
+nginxdefaultconf='/etc/nginx/nginx.conf'
+
+CHECK_NGINX_WORKER_CONN=$(grep "worker_connections 4096" $nginxdefaultconf)
+if ! [[ $CHECK_NGINX_WORKER_CONN ]]; then
+  printf >&2 "${GREEN}Changing nginx worker connections to 4096${NC}\n"
+  sudo sed -i 's/worker_connections.*/worker_connections 4096;/g' $nginxdefaultconf
+fi
+
+CHECK_NGINX_NOLIMIT=$(grep "worker_rlimit_nofile 1000000" $nginxdefaultconf)
+if ! [[ $CHECK_NGINX_NOLIMIT ]]; then
+  sudo sed -i '/worker_rlimit_nofile.*/d' $nginxdefaultconf
+  printf >&2 "${GREEN}Increasing nginx open file limit${NC}\n"
+  sudo sed -i '1s/^/worker_rlimit_nofile 1000000;\
+/' $nginxdefaultconf
+fi
+
+backend_conf='/etc/nginx/sites-available/rmm.conf'
+CHECK_NGINX_REUSEPORT=$(grep reuseport $backend_conf)
+if ! [[ $CHECK_NGINX_REUSEPORT ]]; then
+  printf >&2 "${GREEN}Setting nginx reuseport${NC}\n"
+  sudo sed -i 's/listen 443 ssl;/listen 443 ssl reuseport;/g' $backend_conf
+fi
+
+sudo sed -i 's/# server_names_hash_bucket_size.*/server_names_hash_bucket_size 64;/g' $nginxdefaultconf
+
+if ! sudo nginx -t > /dev/null 2>&1; then
+  sudo nginx -t
+  echo -ne "\n"
+  echo -ne "${RED}You have syntax errors in your nginx configs. See errors above. Please fix them and re-run this script.${NC}\n"
+  echo -ne "${RED}Aborting...${NC}\n"
+  exit 1
+fi

HAS_PY310=$(python3.10 --version | grep ${PYTHON_VER})
if ! [[ $HAS_PY310 ]]; then