Compare commits

215 Commits
| @@ -23,6 +23,6 @@ POSTGRES_USER=postgres | ||||
| POSTGRES_PASS=postgrespass | ||||
|  | ||||
| # DEV SETTINGS | ||||
| APP_PORT=8000 | ||||
| API_PORT=8080 | ||||
| APP_PORT=80 | ||||
| API_PORT=80 | ||||
| HTTP_PROTOCOL=https | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| FROM python:3.8-slim | ||||
| FROM python:3.9.2-slim | ||||
|  | ||||
| ENV TACTICAL_DIR /opt/tactical | ||||
| ENV TACTICAL_GO_DIR /usr/local/rmmgo | ||||
| @@ -15,7 +15,7 @@ RUN groupadd -g 1000 tactical && \ | ||||
|     useradd -u 1000 -g 1000 tactical | ||||
|  | ||||
| # Copy Go Files | ||||
| COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go | ||||
| COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go | ||||
|  | ||||
| # Copy Dev python reqs | ||||
| COPY ./requirements.txt / | ||||
|   | ||||
| @@ -3,6 +3,7 @@ version: '3.4' | ||||
| services: | ||||
|   api-dev: | ||||
|     image: api-dev | ||||
|     restart: always | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: ./api.dockerfile | ||||
| @@ -21,6 +22,7 @@ services: | ||||
|  | ||||
|   app-dev: | ||||
|     image: node:12-alpine | ||||
|     restart: always | ||||
|     command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}" | ||||
|     working_dir: /workspace/web | ||||
|     volumes: | ||||
|   | ||||
| @@ -45,7 +45,7 @@ function django_setup { | ||||
|   echo "setting up django environment" | ||||
|  | ||||
|   # configure django settings | ||||
|   MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token) | ||||
|   MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)" | ||||
|  | ||||
|   DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1) | ||||
|    | ||||
| @@ -100,35 +100,35 @@ MESH_USERNAME = '${MESH_USER}' | ||||
| MESH_SITE = 'https://${MESH_HOST}' | ||||
| MESH_TOKEN_KEY = '${MESH_TOKEN}' | ||||
| REDIS_HOST    = '${REDIS_HOST}' | ||||
| ADMIN_ENABLED = True | ||||
| EOF | ||||
| )" | ||||
|  | ||||
|   echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py | ||||
|  | ||||
|   # run migrations and init scripts | ||||
|   python manage.py migrate --no-input | ||||
|   python manage.py collectstatic --no-input | ||||
|   python manage.py initial_db_setup | ||||
|   python manage.py initial_mesh_setup | ||||
|   python manage.py load_chocos | ||||
|   python manage.py load_community_scripts | ||||
|   python manage.py reload_nats | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py load_chocos | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py reload_nats | ||||
|  | ||||
|   # create super user  | ||||
|   echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell | ||||
|  | ||||
| } | ||||
|  | ||||
| if [ "$1" = 'tactical-init-dev' ]; then | ||||
|  | ||||
|   # make directories if they don't exist | ||||
|   mkdir -p ${TACTICAL_DIR}/tmp | ||||
|   mkdir -p "${TACTICAL_DIR}/tmp" | ||||
|  | ||||
|   test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}" | ||||
|  | ||||
|   # setup Python virtual env and install dependencies | ||||
|   test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV} | ||||
|   pip install --no-cache-dir -r /requirements.txt | ||||
|   ! test -e "${VIRTUAL_ENV}" && python -m venv ${VIRTUAL_ENV} | ||||
|   "${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt | ||||
|  | ||||
|   django_setup | ||||
|  | ||||
| @@ -150,20 +150,20 @@ EOF | ||||
| fi | ||||
|  | ||||
| if [ "$1" = 'tactical-api' ]; then | ||||
|   cp ${WORKSPACE_DIR}/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo | ||||
|   cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo | ||||
|   chmod +x /usr/local/bin/goversioninfo | ||||
|    | ||||
|   check_tactical_ready | ||||
|   python manage.py runserver 0.0.0.0:${API_PORT} | ||||
|   "${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}" | ||||
| fi | ||||
|  | ||||
| if [ "$1" = 'tactical-celery-dev' ]; then | ||||
|   check_tactical_ready | ||||
|   env/bin/celery -A tacticalrmm worker -l debug | ||||
|   "${VIRTUAL_ENV}"/bin/celery -A tacticalrmm worker -l debug | ||||
| fi | ||||
|  | ||||
| if [ "$1" = 'tactical-celerybeat-dev' ]; then | ||||
|   check_tactical_ready | ||||
|   test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" | ||||
|   env/bin/celery -A tacticalrmm beat -l debug | ||||
|   "${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug | ||||
| fi | ||||
|   | ||||
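The entrypoint changes above stop relying on an activated environment and instead call the virtual environment's own binaries (`${VIRTUAL_ENV}/bin/pip`, `${VIRTUAL_ENV}/bin/python`, `${VIRTUAL_ENV}/bin/celery`), which behaves the same whether or not the env is activated. A minimal sketch of that pattern, with an assumed `/opt/venv` path:

```
# sketch only; /opt/venv and the requirements.txt location are assumptions
python3 -m venv /opt/venv
/opt/venv/bin/pip install --no-cache-dir -r requirements.txt
/opt/venv/bin/python manage.py migrate --no-input
```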
| @@ -1,44 +1,32 @@ | ||||
| # To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file | ||||
| amqp==2.6.1 | ||||
| asgiref==3.3.1 | ||||
| asyncio-nats-client==0.11.4 | ||||
| billiard==3.6.3.0 | ||||
| celery==4.4.6 | ||||
| certifi==2020.12.5 | ||||
| cffi==1.14.3 | ||||
| chardet==3.0.4 | ||||
| cryptography==3.2.1 | ||||
| decorator==4.4.2 | ||||
| Django==3.1.4 | ||||
| django-cors-headers==3.5.0 | ||||
| django-rest-knox==4.1.0 | ||||
| djangorestframework==3.12.2 | ||||
| future==0.18.2 | ||||
| idna==2.10 | ||||
| kombu==4.6.11 | ||||
| loguru==0.5.3 | ||||
| msgpack==1.0.0 | ||||
| packaging==20.4 | ||||
| psycopg2-binary==2.8.6 | ||||
| pycparser==2.20 | ||||
| pycryptodome==3.9.9 | ||||
| pyotp==2.4.1 | ||||
| pyparsing==2.4.7 | ||||
| pytz==2020.4 | ||||
| qrcode==6.1 | ||||
| redis==3.5.3 | ||||
| requests==2.25.0 | ||||
| six==1.15.0 | ||||
| sqlparse==0.4.1 | ||||
| twilio==6.49.0 | ||||
| urllib3==1.26.2 | ||||
| validators==0.18.1 | ||||
| vine==1.3.0 | ||||
| websockets==8.1 | ||||
| zipp==3.4.0 | ||||
| asyncio-nats-client | ||||
| celery | ||||
| Django | ||||
| django-cors-headers | ||||
| django-rest-knox | ||||
| djangorestframework | ||||
| loguru | ||||
| msgpack | ||||
| psycopg2-binary | ||||
| pycparser | ||||
| pycryptodome | ||||
| pyotp | ||||
| pyparsing | ||||
| pytz | ||||
| qrcode | ||||
| redis | ||||
| twilio | ||||
| packaging | ||||
| validators | ||||
| websockets | ||||
| black | ||||
| Werkzeug | ||||
| django-extensions | ||||
| coverage | ||||
| coveralls | ||||
| model_bakery | ||||
| mkdocs | ||||
| mkdocs-material | ||||
| pymdown-extensions | ||||
| Pygments | ||||
| mypy | ||||
|   | ||||
							
								
								
									
.github/FUNDING.yml (2 changes)
							| @@ -3,7 +3,7 @@ | ||||
| github: wh1te909 | ||||
| patreon: # Replace with a single Patreon username | ||||
| open_collective: # Replace with a single Open Collective username | ||||
| ko_fi: # Replace with a single Ko-fi username | ||||
| ko_fi: tacticalrmm | ||||
| tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel | ||||
| community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry | ||||
| liberapay: # Replace with a single Liberapay username | ||||
|   | ||||
							
								
								
									
.github/workflows/deploy-docs.yml (new file, 22 lines)
							| @@ -0,0 +1,22 @@ | ||||
| name: Deploy Docs | ||||
| on: | ||||
|   push: | ||||
|     branches: | ||||
|       - develop | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     working-directory: docs | ||||
|  | ||||
| jobs: | ||||
|   deploy: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: 3.x | ||||
|       - run: pip install --upgrade pip | ||||
|       - run: pip install --upgrade setuptools wheel | ||||
|       - run: pip install mkdocs mkdocs-material pymdown-extensions | ||||
|       - run: mkdocs gh-deploy --force | ||||
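The new workflow builds and publishes the MkDocs site on every push to `develop`. To build or preview the same site locally, roughly the following should work from the `docs/` directory the workflow targets:

```
pip install mkdocs mkdocs-material pymdown-extensions
mkdocs serve   # live preview on http://127.0.0.1:8000
mkdocs build   # writes the static site to ./site (ignored via the new docs/site/ .gitignore entry)
```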
							
								
								
									
.gitignore (2 changes)
							| @@ -45,3 +45,5 @@ htmlcov/ | ||||
| docker-compose.dev.yml | ||||
| docs/.vuepress/dist | ||||
| nats-rmm.conf | ||||
| .mypy_cache | ||||
| docs/site/ | ||||
|   | ||||
							
								
								
									
.vscode/settings.json (7 changes)
							| @@ -3,7 +3,14 @@ | ||||
|     "python.languageServer": "Pylance", | ||||
|     "python.analysis.extraPaths": [ | ||||
|         "api/tacticalrmm", | ||||
|         "api/env", | ||||
|     ], | ||||
|     "python.analysis.diagnosticSeverityOverrides": { | ||||
|         "reportUnusedImport": "error", | ||||
|         "reportDuplicateImport": "error", | ||||
|     }, | ||||
|     "python.analysis.memory.keepLibraryAst": true, | ||||
|     "python.linting.mypyEnabled": true, | ||||
|     "python.analysis.typeCheckingMode": "basic", | ||||
|     "python.formatting.provider": "black", | ||||
|     "editor.formatOnSave": true, | ||||
|   | ||||
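The updated settings turn on mypy linting and Pylance's basic type-checking mode in the editor (`.mypy_cache` is ignored by the new `.gitignore` entry above). A rough command-line equivalent, assuming mypy is installed in the project's environment:

```
pip install mypy
mypy api/tacticalrmm
```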
							
								
								
									
README.md (98 changes)
							| @@ -15,6 +15,8 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso | ||||
|  | ||||
| ### [Discord Chat](https://discord.gg/upGTkWp) | ||||
|  | ||||
| ### [Documentation](https://wh1te909.github.io/tacticalrmm/) | ||||
|  | ||||
| ## Features | ||||
|  | ||||
| - Teamviewer-like remote desktop control | ||||
| @@ -33,98 +35,6 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso | ||||
|  | ||||
| - Windows 7, 8.1, 10, Server 2008R2, 2012R2, 2016, 2019 | ||||
|  | ||||
| ## Installation | ||||
| ## Installation / Backup / Restore / Usage | ||||
|  | ||||
| ### Requirements | ||||
| - VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10) | ||||
| - A domain you own with at least 3 subdomains | ||||
| - Google Authenticator app (2 factor is NOT optional) | ||||
|  | ||||
| ### Docker | ||||
| Refer to the [docker setup](docker/readme.md) | ||||
|  | ||||
|  | ||||
| ### Installation example (Ubuntu server 20.04 LTS) | ||||
|  | ||||
| Fresh VPS with latest updates\ | ||||
| login as root and create a user and add to sudoers group (we will be creating a user called tactical) | ||||
| ``` | ||||
| apt update && apt -y upgrade | ||||
| adduser tactical | ||||
| usermod -a -G sudo tactical | ||||
| ``` | ||||
|  | ||||
| switch to the tactical user and setup the firewall | ||||
| ``` | ||||
| su - tactical | ||||
| sudo ufw default deny incoming | ||||
| sudo ufw default allow outgoing | ||||
| sudo ufw allow ssh | ||||
| sudo ufw allow http | ||||
| sudo ufw allow https | ||||
| sudo ufw allow proto tcp from any to any port 4222 | ||||
| sudo ufw enable && sudo ufw reload | ||||
| ``` | ||||
|  | ||||
| Our domain for this example is tacticalrmm.com | ||||
|  | ||||
| In the DNS manager of wherever our domain is hosted, we will create three A records, all pointing to the public IP address of our VPS | ||||
|  | ||||
| Create A record ```api.tacticalrmm.com``` for the django rest backend\ | ||||
| Create A record ```rmm.tacticalrmm.com``` for the vue frontend\ | ||||
| Create A record ```mesh.tacticalrmm.com``` for meshcentral | ||||
|  | ||||
| Download the install script and run it | ||||
|  | ||||
| ``` | ||||
| wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh | ||||
| chmod +x install.sh | ||||
| ./install.sh | ||||
| ``` | ||||
|  | ||||
|  Links will be provided at the end of the install script.\ | ||||
|  Download the executable from the first link, then open ```rmm.tacticalrmm.com``` and login.\ | ||||
|  Upload the executable when prompted during the initial setup page. | ||||
|  | ||||
|  | ||||
| ### Install an agent | ||||
| From the app's dashboard, choose Agents > Install Agent to generate an installer. | ||||
|  | ||||
| ## Updating | ||||
| Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh) | ||||
| ``` | ||||
| wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh | ||||
| chmod +x update.sh | ||||
| ./update.sh | ||||
| ``` | ||||
|  | ||||
| ## Backup | ||||
| Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh) | ||||
| ``` | ||||
| wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh | ||||
| ``` | ||||
| Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section) | ||||
|  | ||||
| Run it | ||||
| ``` | ||||
| chmod +x backup.sh | ||||
| ./backup.sh | ||||
| ``` | ||||
|  | ||||
| ## Restore | ||||
| Change your 3 A records to point to new server's public IP | ||||
|  | ||||
| Create same linux user account as old server and add to sudoers group and setup firewall (see install instructions above) | ||||
|  | ||||
| Copy backup file to new server | ||||
|  | ||||
| Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps. | ||||
| ``` | ||||
| wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh | ||||
| ``` | ||||
|  | ||||
| Run the restore script, passing it the backup tar file as the first argument | ||||
| ``` | ||||
| chmod +x restore.sh | ||||
| ./restore.sh rmm-backup-xxxxxxx.tar | ||||
| ``` | ||||
| ### Refer to the [documentation](https://wh1te909.github.io/tacticalrmm/) | ||||
| @@ -1,5 +1,4 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from rest_framework.authtoken.admin import TokenAdmin | ||||
|  | ||||
| from .models import User | ||||
|   | ||||
| @@ -1,6 +1,5 @@ | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils import timezone as djangotime | ||||
| from knox.models import AuthToken | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,11 +1,13 @@ | ||||
| import pyotp | ||||
| import subprocess | ||||
|  | ||||
| import pyotp | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from accounts.models import User | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generates barcode for Google Authenticator and creates totp for user" | ||||
|     help = "Generates barcode for Authenticator and creates totp for user" | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("code", type=str) | ||||
| @@ -24,12 +26,10 @@ class Command(BaseCommand): | ||||
|         url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain) | ||||
|         subprocess.run(f'qr "{url}"', shell=True) | ||||
|         self.stdout.write( | ||||
|             self.style.SUCCESS( | ||||
|                 "Scan the barcode above with your google authenticator app" | ||||
|             ) | ||||
|             self.style.SUCCESS("Scan the barcode above with your authenticator app") | ||||
|         ) | ||||
|         self.stdout.write( | ||||
|             self.style.SUCCESS( | ||||
|                 f"If that doesn't work you may manually enter the key: {code}" | ||||
|                 f"If that doesn't work you may manually enter the setup key: {code}" | ||||
|             ) | ||||
|         ) | ||||
|   | ||||
							
								
								
									
api/tacticalrmm/accounts/management/commands/reset_2fa.py (new file, 57 lines)
							| @@ -0,0 +1,57 @@ | ||||
| import os | ||||
| import subprocess | ||||
|  | ||||
| import pyotp | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from accounts.models import User | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Reset 2fa" | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("username", type=str) | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         username = kwargs["username"] | ||||
|         try: | ||||
|             user = User.objects.get(username=username) | ||||
|         except User.DoesNotExist: | ||||
|             self.stdout.write(self.style.ERROR(f"User {username} doesn't exist")) | ||||
|             return | ||||
|  | ||||
|         domain = "Tactical RMM" | ||||
|         nginx = "/etc/nginx/sites-available/frontend.conf" | ||||
|         found = None | ||||
|         if os.path.exists(nginx): | ||||
|             try: | ||||
|                 with open(nginx, "r") as f: | ||||
|                     for line in f: | ||||
|                         if "server_name" in line: | ||||
|                             found = line | ||||
|                             break | ||||
|  | ||||
|                 if found: | ||||
|                     rep = found.replace("server_name", "").replace(";", "") | ||||
|                     domain = "".join(rep.split()) | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|         code = pyotp.random_base32() | ||||
|         user.totp_key = code | ||||
|         user.save(update_fields=["totp_key"]) | ||||
|  | ||||
|         url = pyotp.totp.TOTP(code).provisioning_uri(username, issuer_name=domain) | ||||
|         subprocess.run(f'qr "{url}"', shell=True) | ||||
|         self.stdout.write( | ||||
|             self.style.WARNING("Scan the barcode above with your authenticator app") | ||||
|         ) | ||||
|         self.stdout.write( | ||||
|             self.style.WARNING( | ||||
|                 f"If that doesn't work you may manually enter the setup key: {code}" | ||||
|             ) | ||||
|         ) | ||||
|         self.stdout.write( | ||||
|             self.style.SUCCESS(f"2fa was successfully reset for user {username}") | ||||
|         ) | ||||
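The new `reset_2fa` management command replaces the user's TOTP key, prints a QR code, and falls back to showing the setup key. As a Django management command it would typically be run from the `api/tacticalrmm` directory, for example (placeholder username):

```
python manage.py reset_2fa some_username
```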
| @@ -0,0 +1,22 @@ | ||||
| from django.core.management.base import BaseCommand | ||||
| from accounts.models import User | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Reset password for user" | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("username", type=str) | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         username = kwargs["username"] | ||||
|         try: | ||||
|             user = User.objects.get(username=username) | ||||
|         except User.DoesNotExist: | ||||
|             self.stdout.write(self.style.ERROR(f"User {username} doesn't exist")) | ||||
|             return | ||||
|  | ||||
|         passwd = input("Enter new password: ") | ||||
|         user.set_password(passwd) | ||||
|         user.save() | ||||
|         self.stdout.write(self.style.SUCCESS(f"Password for {username} was reset!")) | ||||
| @@ -2,8 +2,8 @@ | ||||
|  | ||||
| import django.contrib.auth.models | ||||
| import django.contrib.auth.validators | ||||
| from django.db import migrations, models | ||||
| import django.utils.timezone | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.1.2 on 2020-11-10 20:24 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.7 on 2021-02-28 06:38 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('accounts', '0011_user_default_agent_tbl_tab'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='user', | ||||
|             name='agents_per_page', | ||||
|             field=models.PositiveIntegerField(default=50), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,5 +1,5 @@ | ||||
| from django.db import models | ||||
| from django.contrib.auth.models import AbstractUser | ||||
| from django.db import models | ||||
|  | ||||
| from logs.models import BaseAuditModel | ||||
|  | ||||
| @@ -27,6 +27,7 @@ class User(AbstractUser, BaseAuditModel): | ||||
|     default_agent_tbl_tab = models.CharField( | ||||
|         max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server" | ||||
|     ) | ||||
|     agents_per_page = models.PositiveIntegerField(default=50) | ||||
|  | ||||
|     agent = models.OneToOneField( | ||||
|         "agents.Agent", | ||||
|   | ||||
| @@ -1,9 +1,5 @@ | ||||
| import pyotp | ||||
|  | ||||
| from rest_framework.serializers import ( | ||||
|     ModelSerializer, | ||||
|     SerializerMethodField, | ||||
| ) | ||||
| from rest_framework.serializers import ModelSerializer, SerializerMethodField | ||||
|  | ||||
| from .models import User | ||||
|  | ||||
|   | ||||
| @@ -1,8 +1,9 @@ | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from django.test import override_settings | ||||
|  | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from accounts.models import User | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
|  | ||||
| class TestAccounts(TacticalTestCase): | ||||
| @@ -282,6 +283,7 @@ class TestUserAction(TacticalTestCase): | ||||
|             "userui": True, | ||||
|             "agent_dblclick_action": "editagent", | ||||
|             "default_agent_tbl_tab": "mixed", | ||||
|             "agents_per_page": 1000, | ||||
|         } | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
|   | ||||
| @@ -1,23 +1,20 @@ | ||||
| import pyotp | ||||
|  | ||||
| from django.contrib.auth import login | ||||
| from django.conf import settings | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.contrib.auth import login | ||||
| from django.db import IntegrityError | ||||
|  | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.authtoken.serializers import AuthTokenSerializer | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from knox.views import LoginView as KnoxLoginView | ||||
| from rest_framework import status | ||||
| from rest_framework.authtoken.serializers import AuthTokenSerializer | ||||
| from rest_framework.permissions import AllowAny | ||||
| from rest_framework.response import Response | ||||
| from rest_framework import status | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from .models import User | ||||
| from agents.models import Agent | ||||
| from logs.models import AuditLog | ||||
| from tacticalrmm.utils import notify_error | ||||
|  | ||||
| from .serializers import UserSerializer, TOTPSetupSerializer | ||||
| from .models import User | ||||
| from .serializers import TOTPSetupSerializer, UserSerializer | ||||
|  | ||||
|  | ||||
| class CheckCreds(KnoxLoginView): | ||||
| @@ -81,7 +78,7 @@ class GetAddUsers(APIView): | ||||
|     def post(self, request): | ||||
|         # add new user | ||||
|         try: | ||||
|             user = User.objects.create_user( | ||||
|             user = User.objects.create_user(  # type: ignore | ||||
|                 request.data["username"], | ||||
|                 request.data["email"], | ||||
|                 request.data["password"], | ||||
| @@ -202,4 +199,8 @@ class UserUI(APIView): | ||||
|             user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"] | ||||
|             user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"]) | ||||
|  | ||||
|         if "agents_per_page" in request.data.keys(): | ||||
|             user.agents_per_page = request.data["agents_per_page"] | ||||
|             user.save(update_fields=["agents_per_page"]) | ||||
|  | ||||
|         return Response("ok") | ||||
|   | ||||
| @@ -1,8 +1,7 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from .models import Agent, AgentOutage, RecoveryAction, Note | ||||
| from .models import Agent, Note, RecoveryAction | ||||
|  | ||||
| admin.site.register(Agent) | ||||
| admin.site.register(AgentOutage) | ||||
| admin.site.register(RecoveryAction) | ||||
| admin.site.register(Note) | ||||
|   | ||||
| @@ -1,14 +1,12 @@ | ||||
| import json | ||||
| import os | ||||
| import random | ||||
| import string | ||||
| import os | ||||
| import json | ||||
|  | ||||
| from model_bakery.recipe import Recipe, seq | ||||
| from itertools import cycle | ||||
| from django.utils import timezone as djangotime | ||||
| from django.conf import settings | ||||
|  | ||||
| from .models import Agent | ||||
| from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
| from model_bakery.recipe import Recipe, foreign_key, seq | ||||
|  | ||||
|  | ||||
| def generate_agent_id(hostname): | ||||
| @@ -16,6 +14,9 @@ def generate_agent_id(hostname): | ||||
|     return f"{rand}-{hostname}" | ||||
|  | ||||
|  | ||||
| site = Recipe("clients.Site") | ||||
|  | ||||
|  | ||||
| def get_wmi_data(): | ||||
|     with open( | ||||
|         os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json") | ||||
| @@ -24,12 +25,12 @@ def get_wmi_data(): | ||||
|  | ||||
|  | ||||
| agent = Recipe( | ||||
|     Agent, | ||||
|     "agents.Agent", | ||||
|     site=foreign_key(site), | ||||
|     hostname="DESKTOP-TEST123", | ||||
|     version="1.3.0", | ||||
|     monitoring_type=cycle(["workstation", "server"]), | ||||
|     salt_id=generate_agent_id("DESKTOP-TEST123"), | ||||
|     agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123", | ||||
|     agent_id=seq("asdkj3h4234-1234hg3h4g34-234jjh34|DESKTOP-TEST123"), | ||||
| ) | ||||
|  | ||||
| server_agent = agent.extend( | ||||
| @@ -42,8 +43,12 @@ workstation_agent = agent.extend( | ||||
|  | ||||
| online_agent = agent.extend(last_seen=djangotime.now()) | ||||
|  | ||||
| offline_agent = agent.extend( | ||||
|     last_seen=djangotime.now() - djangotime.timedelta(minutes=7) | ||||
| ) | ||||
|  | ||||
| overdue_agent = agent.extend( | ||||
|     last_seen=djangotime.now() - djangotime.timedelta(minutes=6) | ||||
|     last_seen=djangotime.now() - djangotime.timedelta(minutes=35) | ||||
| ) | ||||
|  | ||||
| agent_with_services = agent.extend( | ||||
|   | ||||
| @@ -0,0 +1,93 @@ | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from agents.models import Agent | ||||
| from clients.models import Client, Site | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Bulk update agent offline/overdue time" | ||||
|  | ||||
|     def add_arguments(self, parser): | ||||
|         parser.add_argument("time", type=int, help="Time in minutes") | ||||
|         parser.add_argument( | ||||
|             "--client", | ||||
|             type=str, | ||||
|             help="Client Name", | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--site", | ||||
|             type=str, | ||||
|             help="Site Name", | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--offline", | ||||
|             action="store_true", | ||||
|             help="Offline", | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--overdue", | ||||
|             action="store_true", | ||||
|             help="Overdue", | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--all", | ||||
|             action="store_true", | ||||
|             help="All agents", | ||||
|         ) | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         time = kwargs["time"] | ||||
|         client_name = kwargs["client"] | ||||
|         site_name = kwargs["site"] | ||||
|         all_agents = kwargs["all"] | ||||
|         offline = kwargs["offline"] | ||||
|         overdue = kwargs["overdue"] | ||||
|         agents = None | ||||
|  | ||||
|         if offline and time < 2: | ||||
|             self.stdout.write(self.style.ERROR("Minimum offline time is 2 minutes")) | ||||
|             return | ||||
|  | ||||
|         if overdue and time < 3: | ||||
|             self.stdout.write(self.style.ERROR("Minimum overdue time is 3 minutes")) | ||||
|             return | ||||
|  | ||||
|         if client_name: | ||||
|             try: | ||||
|                 client = Client.objects.get(name=client_name) | ||||
|             except Client.DoesNotExist: | ||||
|                 self.stdout.write( | ||||
|                     self.style.ERROR(f"Client {client_name} doesn't exist") | ||||
|                 ) | ||||
|                 return | ||||
|  | ||||
|             agents = Agent.objects.filter(site__client=client) | ||||
|  | ||||
|         elif site_name: | ||||
|             try: | ||||
|                 site = Site.objects.get(name=site_name) | ||||
|             except Site.DoesNotExist: | ||||
|                 self.stdout.write(self.style.ERROR(f"Site {site_name} doesn't exist")) | ||||
|                 return | ||||
|  | ||||
|             agents = Agent.objects.filter(site=site) | ||||
|  | ||||
|         elif all_agents: | ||||
|             agents = Agent.objects.all() | ||||
|  | ||||
|         if agents: | ||||
|             if offline: | ||||
|                 agents.update(offline_time=time) | ||||
|                 self.stdout.write( | ||||
|                     self.style.SUCCESS( | ||||
|                         f"Changed offline time on {len(agents)} agents to {time} minutes" | ||||
|                     ) | ||||
|                 ) | ||||
|  | ||||
|             if overdue: | ||||
|                 agents.update(overdue_time=time) | ||||
|                 self.stdout.write( | ||||
|                     self.style.SUCCESS( | ||||
|                         f"Changed overdue time on {len(agents)} agents to {time} minutes" | ||||
|                     ) | ||||
|                 ) | ||||
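Going by the argument parser above, the bulk offline/overdue command takes a time in minutes plus a scope (`--client`, `--site`, or `--all`) and one or both of `--offline`/`--overdue`. The module name is not visible in this hunk, so `bulk_change_time` below is purely a hypothetical name for illustration:

```
# hypothetical command name; flags match the parser defined above
python manage.py bulk_change_time 10 --offline --all
python manage.py bulk_change_time 45 --overdue --client "Example Client"
```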
| @@ -0,0 +1,18 @@ | ||||
| from django.conf import settings | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from agents.models import Agent | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Shows online agents that are not on the latest version" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         q = Agent.objects.exclude(version=settings.LATEST_AGENT_VER).only( | ||||
|             "pk", "version", "last_seen", "overdue_time", "offline_time" | ||||
|         ) | ||||
|         agents = [i for i in q if i.status == "online"] | ||||
|         for agent in agents: | ||||
|             self.stdout.write( | ||||
|                 self.style.SUCCESS(f"{agent.hostname} - v{agent.version}") | ||||
|             ) | ||||
| @@ -1,8 +1,8 @@ | ||||
| # Generated by Django 3.0.6 on 2020-05-31 01:23 | ||||
|  | ||||
| import django.contrib.postgres.fields.jsonb | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.0.7 on 2020-06-09 16:07 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.0.8 on 2020-08-09 05:31 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| # Generated by Django 3.1.1 on 2020-09-22 20:57 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.conf import settings | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.1.2 on 2020-11-01 22:53 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 21:11 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0026_auto_20201125_2334'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='agent', | ||||
|             name='overdue_dashboard_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/agents/migrations/0028_auto_20210206_1534.py (new file, 23 lines)
							| @@ -0,0 +1,23 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-06 15:34 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0027_agent_overdue_dashboard_alert'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='agentoutage', | ||||
|             name='outage_email_sent_time', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='agentoutage', | ||||
|             name='outage_sms_sent_time', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/agents/migrations/0029_delete_agentoutage.py (new file, 16 lines)
							| @@ -0,0 +1,16 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-10 21:56 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0028_auto_20210206_1534'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.DeleteModel( | ||||
|             name='AgentOutage', | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/agents/migrations/0030_agent_offline_time.py (new file, 18 lines)
							| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.6 on 2021-02-16 08:50 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0029_delete_agentoutage'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='agent', | ||||
|             name='offline_time', | ||||
|             field=models.PositiveIntegerField(default=4), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,25 +1,27 @@ | ||||
| import time | ||||
| import asyncio | ||||
| import base64 | ||||
| from Crypto.Cipher import AES | ||||
| from Crypto.Random import get_random_bytes | ||||
| from Crypto.Hash import SHA3_384 | ||||
| from Crypto.Util.Padding import pad | ||||
| import validators | ||||
| import msgpack | ||||
| import re | ||||
| import time | ||||
| from collections import Counter | ||||
| from typing import List | ||||
| from loguru import logger | ||||
| from packaging import version as pyver | ||||
| from distutils.version import LooseVersion | ||||
| from typing import Any, Union | ||||
|  | ||||
| import msgpack | ||||
| import validators | ||||
| from Crypto.Cipher import AES | ||||
| from Crypto.Hash import SHA3_384 | ||||
| from Crypto.Random import get_random_bytes | ||||
| from Crypto.Util.Padding import pad | ||||
| from django.conf import settings | ||||
| from django.db import models | ||||
| from django.utils import timezone as djangotime | ||||
| from loguru import logger | ||||
| from nats.aio.client import Client as NATS | ||||
| from nats.aio.errors import ErrTimeout | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from django.db import models | ||||
| from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from core.models import CoreSettings, TZ_CHOICES | ||||
| from alerts.models import AlertTemplate | ||||
| from core.models import TZ_CHOICES, CoreSettings | ||||
| from logs.models import BaseAuditModel | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
| @@ -50,6 +52,8 @@ class Agent(BaseAuditModel): | ||||
|     mesh_node_id = models.CharField(null=True, blank=True, max_length=255) | ||||
|     overdue_email_alert = models.BooleanField(default=False) | ||||
|     overdue_text_alert = models.BooleanField(default=False) | ||||
|     overdue_dashboard_alert = models.BooleanField(default=False) | ||||
|     offline_time = models.PositiveIntegerField(default=4) | ||||
|     overdue_time = models.PositiveIntegerField(default=30) | ||||
|     check_interval = models.PositiveIntegerField(default=120) | ||||
|     needs_reboot = models.BooleanField(default=False) | ||||
| @@ -75,6 +79,24 @@ class Agent(BaseAuditModel): | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|  | ||||
|         # get old agent if exists | ||||
|         old_agent = type(self).objects.get(pk=self.pk) if self.pk else None | ||||
|         super(BaseAuditModel, self).save(*args, **kwargs) | ||||
|  | ||||
|         # check if new agent has been create | ||||
|         # or check if policy have changed on agent | ||||
|         # or if site has changed on agent and if so generate-policies | ||||
|         if ( | ||||
|             not old_agent | ||||
|             or old_agent | ||||
|             and old_agent.policy != self.policy | ||||
|             or old_agent.site != self.site | ||||
|         ): | ||||
|             self.generate_checks_from_policies() | ||||
|             self.generate_tasks_from_policies() | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.hostname | ||||
|  | ||||
| @@ -127,7 +149,7 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|     @property | ||||
|     def status(self): | ||||
|         offline = djangotime.now() - djangotime.timedelta(minutes=6) | ||||
|         offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time) | ||||
|         overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time) | ||||
|  | ||||
|         if self.last_seen is not None: | ||||
| @@ -142,14 +164,14 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|     @property | ||||
|     def has_patches_pending(self): | ||||
|         return self.winupdates.filter(action="approve").filter(installed=False).exists() | ||||
|         return self.winupdates.filter(action="approve").filter(installed=False).exists()  # type: ignore | ||||
|  | ||||
|     @property | ||||
|     def checks(self): | ||||
|         total, passing, failing = 0, 0, 0 | ||||
|  | ||||
|         if self.agentchecks.exists(): | ||||
|             for i in self.agentchecks.all(): | ||||
|         if self.agentchecks.exists():  # type: ignore | ||||
|             for i in self.agentchecks.all():  # type: ignore | ||||
|                 total += 1 | ||||
|                 if i.status == "passing": | ||||
|                     passing += 1 | ||||
| @@ -219,6 +241,7 @@ class Agent(BaseAuditModel): | ||||
|             pass | ||||
|  | ||||
|         try: | ||||
|             comp_sys_prod = self.wmi_detail["comp_sys_prod"][0] | ||||
|             return [x["Version"] for x in comp_sys_prod if "Version" in x][0] | ||||
|         except: | ||||
|             pass | ||||
| @@ -248,33 +271,90 @@ class Agent(BaseAuditModel): | ||||
|         except: | ||||
|             return ["unknown disk"] | ||||
|  | ||||
|     def run_script( | ||||
|         self, | ||||
|         scriptpk: int, | ||||
|         args: list[str] = [], | ||||
|         timeout: int = 120, | ||||
|         full: bool = False, | ||||
|         wait: bool = False, | ||||
|         run_on_any: bool = False, | ||||
|     ) -> Any: | ||||
|  | ||||
|         from scripts.models import Script | ||||
|  | ||||
|         script = Script.objects.get(pk=scriptpk) | ||||
|         data = { | ||||
|             "func": "runscriptfull" if full else "runscript", | ||||
|             "timeout": timeout, | ||||
|             "script_args": args, | ||||
|             "payload": { | ||||
|                 "code": script.code, | ||||
|                 "shell": script.shell, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         running_agent = self | ||||
|         if run_on_any: | ||||
|             nats_ping = {"func": "ping"} | ||||
|  | ||||
|             # try on self first | ||||
|             r = asyncio.run(self.nats_cmd(nats_ping, timeout=1)) | ||||
|  | ||||
|             if r == "pong": | ||||
|                 running_agent = self | ||||
|             else: | ||||
|                 online = [ | ||||
|                     agent | ||||
|                     for agent in Agent.objects.only( | ||||
|                         "pk", "last_seen", "overdue_time", "offline_time" | ||||
|                     ) | ||||
|                     if agent.status == "online" | ||||
|                 ] | ||||
|  | ||||
|                 for agent in online: | ||||
|                     r = asyncio.run(agent.nats_cmd(nats_ping, timeout=1)) | ||||
|                     if r == "pong": | ||||
|                         running_agent = agent | ||||
|                         break | ||||
|  | ||||
|                 if running_agent.pk == self.pk: | ||||
|                     return "Unable to find an online agent" | ||||
|  | ||||
|         if wait: | ||||
|             return asyncio.run(running_agent.nats_cmd(data, timeout=timeout, wait=True)) | ||||
|         else: | ||||
|             asyncio.run(running_agent.nats_cmd(data, wait=False)) | ||||
|  | ||||
|         return "ok" | ||||
|  | ||||
|     # auto approves updates | ||||
|     def approve_updates(self): | ||||
|         patch_policy = self.get_patch_policy() | ||||
|  | ||||
|         updates = list() | ||||
|         if patch_policy.critical == "approve": | ||||
|             updates += self.winupdates.filter( | ||||
|             updates += self.winupdates.filter(  # type: ignore | ||||
|                 severity="Critical", installed=False | ||||
|             ).exclude(action="approve") | ||||
|  | ||||
|         if patch_policy.important == "approve": | ||||
|             updates += self.winupdates.filter( | ||||
|             updates += self.winupdates.filter(  # type: ignore | ||||
|                 severity="Important", installed=False | ||||
|             ).exclude(action="approve") | ||||
|  | ||||
|         if patch_policy.moderate == "approve": | ||||
|             updates += self.winupdates.filter( | ||||
|             updates += self.winupdates.filter(  # type: ignore | ||||
|                 severity="Moderate", installed=False | ||||
|             ).exclude(action="approve") | ||||
|  | ||||
|         if patch_policy.low == "approve": | ||||
|             updates += self.winupdates.filter(severity="Low", installed=False).exclude( | ||||
|             updates += self.winupdates.filter(severity="Low", installed=False).exclude(  # type: ignore | ||||
|                 action="approve" | ||||
|             ) | ||||
|  | ||||
|         if patch_policy.other == "approve": | ||||
|             updates += self.winupdates.filter(severity="", installed=False).exclude( | ||||
|             updates += self.winupdates.filter(severity="", installed=False).exclude(  # type: ignore | ||||
|                 action="approve" | ||||
|             ) | ||||
|  | ||||
| @@ -289,7 +369,7 @@ class Agent(BaseAuditModel): | ||||
|         site = self.site | ||||
|         core_settings = CoreSettings.objects.first() | ||||
|         patch_policy = None | ||||
|         agent_policy = self.winupdatepolicy.get() | ||||
|         agent_policy = self.winupdatepolicy.get()  # type: ignore | ||||
|  | ||||
|         if self.monitoring_type == "server": | ||||
|             # check agent policy first which should override client or site policy | ||||
| @@ -374,18 +454,125 @@ class Agent(BaseAuditModel): | ||||
|  | ||||
|         return patch_policy | ||||
|  | ||||
|     def get_approved_update_guids(self) -> List[str]: | ||||
|     def get_approved_update_guids(self) -> list[str]: | ||||
|         return list( | ||||
|             self.winupdates.filter(action="approve", installed=False).values_list( | ||||
|             self.winupdates.filter(action="approve", installed=False).values_list(  # type: ignore | ||||
|                 "guid", flat=True | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     # returns alert template assigned in the following order: policy, site, client, global | ||||
|     # will return None if nothing is found | ||||
|     def get_alert_template(self) -> Union[AlertTemplate, None]: | ||||
|  | ||||
|         site = self.site | ||||
|         client = self.client | ||||
|         core = CoreSettings.objects.first() | ||||
|  | ||||
|         templates = list() | ||||
|         # check if alert template is on a policy assigned to agent | ||||
|         if ( | ||||
|             self.policy | ||||
|             and self.policy.alert_template | ||||
|             and self.policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(self.policy.alert_template) | ||||
|  | ||||
|         # check if policy with alert template is assigned to the site | ||||
|         if ( | ||||
|             self.monitoring_type == "server" | ||||
|             and site.server_policy | ||||
|             and site.server_policy.alert_template | ||||
|             and site.server_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(site.server_policy.alert_template) | ||||
|         if ( | ||||
|             self.monitoring_type == "workstation" | ||||
|             and site.workstation_policy | ||||
|             and site.workstation_policy.alert_template | ||||
|             and site.workstation_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(site.workstation_policy.alert_template) | ||||
|  | ||||
|         # check if alert template is assigned to site | ||||
|         if site.alert_template and site.alert_template.is_active: | ||||
|             templates.append(site.alert_template) | ||||
|  | ||||
|         # check if policy with alert template is assigned to the client | ||||
|         if ( | ||||
|             self.monitoring_type == "server" | ||||
|             and client.server_policy | ||||
|             and client.server_policy.alert_template | ||||
|             and client.server_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(client.server_policy.alert_template) | ||||
|         if ( | ||||
|             self.monitoring_type == "workstation" | ||||
|             and client.workstation_policy | ||||
|             and client.workstation_policy.alert_template | ||||
|             and client.workstation_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(client.workstation_policy.alert_template) | ||||
|  | ||||
|         # check if alert template is on client and return | ||||
|         if client.alert_template and client.alert_template.is_active: | ||||
|             templates.append(client.alert_template) | ||||
|  | ||||
|         # check if alert template is applied globally and return | ||||
|         if core.alert_template and core.alert_template.is_active: | ||||
|             templates.append(core.alert_template) | ||||
|  | ||||
|         # if agent is a workstation, check if policy with alert template is assigned to the site, client, or core | ||||
|         if ( | ||||
|             self.monitoring_type == "server" | ||||
|             and core.server_policy | ||||
|             and core.server_policy.alert_template | ||||
|             and core.server_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(core.server_policy.alert_template) | ||||
|         if ( | ||||
|             self.monitoring_type == "workstation" | ||||
|             and core.workstation_policy | ||||
|             and core.workstation_policy.alert_template | ||||
|             and core.workstation_policy.alert_template.is_active | ||||
|         ): | ||||
|             templates.append(core.workstation_policy.alert_template) | ||||
|  | ||||
|         # go through the templates and return the first one that isn't excluded | ||||
|         for template in templates: | ||||
|             # check if client, site, or agent has been excluded from template | ||||
|             if ( | ||||
|                 client.pk | ||||
|                 in template.excluded_clients.all().values_list("pk", flat=True) | ||||
|                 or site.pk in template.excluded_sites.all().values_list("pk", flat=True) | ||||
|                 or self.pk | ||||
|                 in template.excluded_agents.all() | ||||
|                 .only("pk") | ||||
|                 .values_list("pk", flat=True) | ||||
|             ): | ||||
|                 continue | ||||
|  | ||||
|             # check if template is excluding workstations | ||||
|             elif ( | ||||
|                 self.monitoring_type == "workstation" and template.exclude_workstations | ||||
|             ): | ||||
|                 continue | ||||
|  | ||||
|             # check if template is excluding servers | ||||
|             elif self.monitoring_type == "server" and template.exclude_servers: | ||||
|                 continue | ||||
|  | ||||
|             else: | ||||
|                 return template | ||||
|  | ||||
|         # no alert templates found or agent has been excluded | ||||
|         return None | ||||
|  | ||||
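For orientation, the method above walks the templates in precedence order (agent policy, site default policy, site, client default policy, client, global, core default policy) and returns the first active template that does not exclude the client, site, or agent. A minimal, illustrative usage sketch:

    # Illustrative only: resolve the effective alert template for one agent.
    agent = Agent.objects.select_related("site__client").first()
    template = agent.get_alert_template()
    if template is None:
        print(f"{agent.hostname}: no active alert template applies (or it is excluded)")
    else:
        print(f"{agent.hostname}: alerts will use template '{template.name}'")
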
|     def generate_checks_from_policies(self): | ||||
|         from automation.models import Policy | ||||
|  | ||||
|         # Clear agent checks that have overriden_by_policy set | ||||
|         self.agentchecks.update(overriden_by_policy=False) | ||||
|         self.agentchecks.update(overriden_by_policy=False)  # type: ignore | ||||
|  | ||||
|         # Generate checks based on policies | ||||
|         Policy.generate_policy_checks(self) | ||||
| @@ -420,7 +607,7 @@ class Agent(BaseAuditModel): | ||||
|         except Exception: | ||||
|             return "err" | ||||
|  | ||||
|     async def nats_cmd(self, data, timeout=30, wait=True): | ||||
|     async def nats_cmd(self, data: dict, timeout: int = 30, wait: bool = True): | ||||
|         nc = NATS() | ||||
|         options = { | ||||
|             "servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222", | ||||
| @@ -442,7 +629,7 @@ class Agent(BaseAuditModel): | ||||
|             except ErrTimeout: | ||||
|                 ret = "timeout" | ||||
|             else: | ||||
|                 ret = msgpack.loads(msg.data) | ||||
|                 ret = msgpack.loads(msg.data)  # type: ignore | ||||
|  | ||||
|             await nc.close() | ||||
|             return ret | ||||
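nats_cmd is a coroutine, so synchronous callers in this codebase wrap it with asyncio.run and check for the "timeout" sentinel. A hedged sketch ("ping" is only an example func name):

    import asyncio

    # Illustrative only: issue a NATS command and handle the timeout sentinel.
    r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=10))
    if r == "timeout":
        print("agent did not respond in time")
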
| @@ -464,12 +651,12 @@ class Agent(BaseAuditModel): | ||||
|     def delete_superseded_updates(self): | ||||
|         try: | ||||
|             pks = []  # list of pks to delete | ||||
|             kbs = list(self.winupdates.values_list("kb", flat=True)) | ||||
|             kbs = list(self.winupdates.values_list("kb", flat=True))  # type: ignore | ||||
|             d = Counter(kbs) | ||||
|             dupes = [k for k, v in d.items() if v > 1] | ||||
|  | ||||
|             for dupe in dupes: | ||||
|                 titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True) | ||||
|                 titles = self.winupdates.filter(kb=dupe).values_list("title", flat=True)  # type: ignore | ||||
|                 # extract the version from the title and sort from oldest to newest | ||||
|                 # skip if no version info is available, as there is nothing to parse | ||||
|                 try: | ||||
| @@ -482,24 +669,24 @@ class Agent(BaseAuditModel): | ||||
|                     continue | ||||
|                 # append all but the latest version to our list of pks to delete | ||||
|                 for ver in sorted_vers[:-1]: | ||||
|                     q = self.winupdates.filter(kb=dupe).filter(title__contains=ver) | ||||
|                     q = self.winupdates.filter(kb=dupe).filter(title__contains=ver)  # type: ignore | ||||
|                     pks.append(q.first().pk) | ||||
|  | ||||
|             pks = list(set(pks)) | ||||
|             self.winupdates.filter(pk__in=pks).delete() | ||||
|             self.winupdates.filter(pk__in=pks).delete()  # type: ignore | ||||
|         except Exception: | ||||
|             pass | ||||
|  | ||||
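The duplicate-KB detection in delete_superseded_updates is a plain collections.Counter pattern; stripped of the Django query it reduces to this (KB numbers below are made up):

    from collections import Counter

    kbs = ["KB5000001", "KB5000001", "KB5000002"]
    dupes = [kb for kb, count in Counter(kbs).items() if count > 1]
    assert dupes == ["KB5000001"]  # only KBs reported more than once get revisited
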
|     # define how the agent should handle pending actions | ||||
|     def handle_pending_actions(self): | ||||
|         pending_actions = self.pendingactions.filter(status="pending") | ||||
|         pending_actions = self.pendingactions.filter(status="pending")  # type: ignore | ||||
|  | ||||
|         for action in pending_actions: | ||||
|             if action.action_type == "taskaction": | ||||
|                 from autotasks.tasks import ( | ||||
|                     create_win_task_schedule, | ||||
|                     enable_or_disable_win_task, | ||||
|                     delete_win_task_schedule, | ||||
|                     enable_or_disable_win_task, | ||||
|                 ) | ||||
|  | ||||
|                 task_id = action.details["task_id"] | ||||
| @@ -516,77 +703,77 @@ class Agent(BaseAuditModel): | ||||
|     # for clearing duplicate pending actions on agent | ||||
|     def remove_matching_pending_task_actions(self, task_id): | ||||
|         # remove any other pending actions on agent with same task_id | ||||
|         for action in self.pendingactions.exclude(status="completed"): | ||||
|         for action in self.pendingactions.exclude(status="completed"):  # type: ignore | ||||
|             if action.details["task_id"] == task_id: | ||||
|                 action.delete() | ||||
|  | ||||
|  | ||||
| class AgentOutage(models.Model): | ||||
|     agent = models.ForeignKey( | ||||
|         Agent, | ||||
|         related_name="agentoutages", | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         on_delete=models.CASCADE, | ||||
|     ) | ||||
|     outage_time = models.DateTimeField(auto_now_add=True) | ||||
|     recovery_time = models.DateTimeField(null=True, blank=True) | ||||
|     outage_email_sent = models.BooleanField(default=False) | ||||
|     outage_sms_sent = models.BooleanField(default=False) | ||||
|     recovery_email_sent = models.BooleanField(default=False) | ||||
|     recovery_sms_sent = models.BooleanField(default=False) | ||||
|  | ||||
|     @property | ||||
|     def is_active(self): | ||||
|         return False if self.recovery_time else True | ||||
|     def should_create_alert(self, alert_template): | ||||
|         return ( | ||||
|             self.overdue_dashboard_alert | ||||
|             or self.overdue_email_alert | ||||
|             or self.overdue_text_alert | ||||
|             or ( | ||||
|                 alert_template | ||||
|                 and ( | ||||
|                     alert_template.agent_always_alert | ||||
|                     or alert_template.agent_always_email | ||||
|                     or alert_template.agent_always_text | ||||
|                 ) | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
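Read together, should_create_alert returns True when the agent itself has any overdue_* flag enabled, or when the resolved alert template forces agent alerts. A minimal sketch, assuming the method lives on the Agent model (the surrounding diff context is ambiguous):

    # Illustrative only.
    template = agent.get_alert_template()
    if agent.should_create_alert(template):
        pass  # create or update the corresponding Alert here
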
|     def send_outage_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE.send_mail( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue", | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue", | ||||
|             ( | ||||
|                 f"Data has not been received from client {self.agent.client.name}, " | ||||
|                 f"site {self.agent.site.name}, " | ||||
|                 f"agent {self.agent.hostname} " | ||||
|                 f"Data has not been received from client {self.client.name}, " | ||||
|                 f"site {self.site.name}, " | ||||
|                 f"agent {self.hostname} " | ||||
|                 "within the expected time." | ||||
|             ), | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_recovery_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE.send_mail( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received", | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data received", | ||||
|             ( | ||||
|                 f"Data has been received from client {self.agent.client.name}, " | ||||
|                 f"site {self.agent.site.name}, " | ||||
|                 f"agent {self.agent.hostname} " | ||||
|                 f"Data has been received from client {self.client.name}, " | ||||
|                 f"site {self.site.name}, " | ||||
|                 f"agent {self.hostname} " | ||||
|                 "after an interruption in data transmission." | ||||
|             ), | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_outage_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         CORE.send_sms( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue" | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue", | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def send_recovery_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.get_alert_template() | ||||
|         CORE.send_sms( | ||||
|             f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received" | ||||
|             f"{self.client.name}, {self.site.name}, {self.hostname} - data received", | ||||
|             alert_template=alert_template, | ||||
|         ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.agent.hostname | ||||
|  | ||||
|  | ||||
| RECOVERY_CHOICES = [ | ||||
|     ("salt", "Salt"), | ||||
|   | ||||
| @@ -1,13 +1,11 @@ | ||||
| import pytz | ||||
|  | ||||
| from rest_framework import serializers | ||||
| from rest_framework.fields import ReadOnlyField | ||||
|  | ||||
| from clients.serializers import ClientSerializer | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .models import Agent, Note | ||||
|  | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
| from clients.serializers import ClientSerializer | ||||
|  | ||||
|  | ||||
| class AgentSerializer(serializers.ModelSerializer): | ||||
|     # for vue | ||||
| @@ -37,7 +35,12 @@ class AgentSerializer(serializers.ModelSerializer): | ||||
| class AgentOverdueActionSerializer(serializers.ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Agent | ||||
|         fields = ["pk", "overdue_email_alert", "overdue_text_alert"] | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_dashboard_alert", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class AgentTableSerializer(serializers.ModelSerializer): | ||||
| @@ -50,6 +53,21 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|     site_name = serializers.ReadOnlyField(source="site.name") | ||||
|     logged_username = serializers.SerializerMethodField() | ||||
|     italic = serializers.SerializerMethodField() | ||||
|     policy = serializers.ReadOnlyField(source="policy.id") | ||||
|     alert_template = serializers.SerializerMethodField() | ||||
|  | ||||
|     def get_alert_template(self, obj): | ||||
|         alert_template = obj.get_alert_template() | ||||
|  | ||||
|         if not alert_template: | ||||
|             return None | ||||
|         else: | ||||
|             return { | ||||
|                 "name": alert_template.name, | ||||
|                 "always_email": alert_template.agent_always_email, | ||||
|                 "always_text": alert_template.agent_always_text, | ||||
|                 "always_alert": alert_template.agent_always_alert, | ||||
|             } | ||||
|  | ||||
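For reference, the serialized alert_template field is either null or a small dict; an example fragment with made-up values:

    # Example shape only; real values depend on the agent's resolved template.
    example_row_fragment = {
        "alert_template": {
            "name": "Default Template",
            "always_email": False,
            "always_text": False,
            "always_alert": True,
        }
    }
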
|     def get_pending_actions(self, obj): | ||||
|         return obj.pendingactions.filter(status="pending").count() | ||||
| @@ -60,7 +78,7 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|         else: | ||||
|             agent_tz = self.context["default_tz"] | ||||
|  | ||||
|         return obj.last_seen.astimezone(agent_tz).timestamp() | ||||
|         return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M") | ||||
|  | ||||
|     def get_logged_username(self, obj) -> str: | ||||
|         if obj.logged_in_username == "None" and obj.status == "online": | ||||
| @@ -77,6 +95,7 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|         model = Agent | ||||
|         fields = [ | ||||
|             "id", | ||||
|             "alert_template", | ||||
|             "hostname", | ||||
|             "agent_id", | ||||
|             "site_name", | ||||
| @@ -89,12 +108,14 @@ class AgentTableSerializer(serializers.ModelSerializer): | ||||
|             "status", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_dashboard_alert", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "checks", | ||||
|             "maintenance_mode", | ||||
|             "logged_username", | ||||
|             "italic", | ||||
|             "policy", | ||||
|         ] | ||||
|         depth = 2 | ||||
|  | ||||
| @@ -120,10 +141,12 @@ class AgentEditSerializer(serializers.ModelSerializer): | ||||
|             "timezone", | ||||
|             "check_interval", | ||||
|             "overdue_time", | ||||
|             "offline_time", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "all_timezones", | ||||
|             "winupdatepolicy", | ||||
|             "policy", | ||||
|         ] | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,17 +1,19 @@ | ||||
| import asyncio | ||||
| from loguru import logger | ||||
| from time import sleep | ||||
| import datetime as dt | ||||
| import random | ||||
| from packaging import version as pyver | ||||
| from typing import List | ||||
| from time import sleep | ||||
| from typing import Union | ||||
|  | ||||
| from django.conf import settings | ||||
| from scripts.models import Script | ||||
| from django.utils import timezone as djangotime | ||||
| from loguru import logger | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
| from agents.models import Agent, AgentOutage | ||||
| from agents.models import Agent | ||||
| from core.models import CoreSettings | ||||
| from logs.models import PendingAction | ||||
| from scripts.models import Script | ||||
| from tacticalrmm.celery import app | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| @@ -75,7 +77,7 @@ def agent_update(pk: int) -> str: | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def send_agent_update_task(pks: List[int]) -> None: | ||||
| def send_agent_update_task(pks: list[int]) -> None: | ||||
|     chunks = (pks[i : i + 30] for i in range(0, len(pks), 30)) | ||||
|     for chunk in chunks: | ||||
|         for pk in chunk: | ||||
| @@ -91,7 +93,7 @@ def auto_self_agent_update_task() -> None: | ||||
|         return | ||||
|  | ||||
|     q = Agent.objects.only("pk", "version") | ||||
|     pks: List[int] = [ | ||||
|     pks: list[int] = [ | ||||
|         i.pk | ||||
|         for i in q | ||||
|         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
| @@ -106,66 +108,96 @@ def auto_self_agent_update_task() -> None: | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outage_email_task(pk): | ||||
| def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     if not alert.email_sent: | ||||
|         sleep(random.randint(1, 15)) | ||||
|         alert.agent.send_outage_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send an email only if the last email sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 alert.agent.send_outage_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
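The resend gate above is just a date comparison: notify again only when the previous notification is older than alert_interval days. A standalone sketch of that check (names are illustrative, not part of the changeset):

    import datetime as dt
    from typing import Union

    def should_resend(last_sent: Union[dt.datetime, None], alert_interval_days: float) -> bool:
        # Resend only when the previous notification is older than the interval.
        if last_sent is None:
            return True
        cutoff = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=alert_interval_days)
        return last_sent < cutoff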
|  | ||||
| @app.task | ||||
| def agent_recovery_email_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     sleep(random.randint(1, 15)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_outage_email() | ||||
|     outage.outage_email_sent = True | ||||
|     outage.save(update_fields=["outage_email_sent"]) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     alert.agent.send_recovery_email() | ||||
|     alert.resolved_email_sent = djangotime.now() | ||||
|     alert.save(update_fields=["resolved_email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_recovery_email_task(pk): | ||||
|     sleep(random.randint(1, 15)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_recovery_email() | ||||
|     outage.recovery_email_sent = True | ||||
|     outage.save(update_fields=["recovery_email_sent"]) | ||||
| def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     if not alert.sms_sent: | ||||
|         sleep(random.randint(1, 15)) | ||||
|         alert.agent.send_outage_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send an SMS only if the last SMS sent is older than the alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 alert.agent.send_outage_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outage_sms_task(pk): | ||||
| def agent_recovery_sms_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     sleep(random.randint(1, 3)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_outage_sms() | ||||
|     outage.outage_sms_sent = True | ||||
|     outage.save(update_fields=["outage_sms_sent"]) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|     alert.agent.send_recovery_sms() | ||||
|     alert.resolved_sms_sent = djangotime.now() | ||||
|     alert.save(update_fields=["resolved_sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_recovery_sms_task(pk): | ||||
|     sleep(random.randint(1, 3)) | ||||
|     outage = AgentOutage.objects.get(pk=pk) | ||||
|     outage.send_recovery_sms() | ||||
|     outage.recovery_sms_sent = True | ||||
|     outage.save(update_fields=["recovery_sms_sent"]) | ||||
| def agent_outages_task() -> None: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def agent_outages_task(): | ||||
|     agents = Agent.objects.only( | ||||
|         "pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert" | ||||
|         "pk", | ||||
|         "last_seen", | ||||
|         "offline_time", | ||||
|         "overdue_time", | ||||
|         "overdue_email_alert", | ||||
|         "overdue_text_alert", | ||||
|         "overdue_dashboard_alert", | ||||
|     ) | ||||
|  | ||||
|     for agent in agents: | ||||
|         if agent.overdue_email_alert or agent.overdue_text_alert: | ||||
|             if agent.status == "overdue": | ||||
|                 outages = AgentOutage.objects.filter(agent=agent) | ||||
|                 if outages and outages.last().is_active: | ||||
|                     continue | ||||
|  | ||||
|                 outage = AgentOutage(agent=agent) | ||||
|                 outage.save() | ||||
|  | ||||
|                 # add a null check history to allow gaps in graph | ||||
|                 for check in agent.agentchecks.all(): | ||||
|                     check.add_check_history(None) | ||||
|  | ||||
|                 if agent.overdue_email_alert and not agent.maintenance_mode: | ||||
|                     agent_outage_email_task.delay(pk=outage.pk) | ||||
|  | ||||
|                 if agent.overdue_text_alert and not agent.maintenance_mode: | ||||
|                     agent_outage_sms_task.delay(pk=outage.pk) | ||||
|         if agent.status == "overdue": | ||||
|             Alert.handle_alert_failure(agent) | ||||
|  | ||||
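The rewritten task only delegates: every agent whose status resolves to "overdue" is handed to Alert.handle_alert_failure, which decides whether to alert and which notifications to send. Assuming "overdue" means the agent has not reported within overdue_time minutes (an assumption, not quoted from the model), the status check reduces to:

    import datetime as dt

    def is_overdue(last_seen: dt.datetime, overdue_time_minutes: int) -> bool:
        # Assumption: overdue == no check-in within the agent's overdue_time window.
        cutoff = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=overdue_time_minutes)
        return last_seen < cutoff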
|  | ||||
| @app.task | ||||
| @@ -184,12 +216,17 @@ def handle_agent_recovery_task(pk: int) -> None: | ||||
|  | ||||
| @app.task | ||||
| def run_script_email_results_task( | ||||
|     agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str] | ||||
|     agentpk: int, | ||||
|     scriptpk: int, | ||||
|     nats_timeout: int, | ||||
|     emails: list[str], | ||||
|     args: list[str] = [], | ||||
| ): | ||||
|     agent = Agent.objects.get(pk=agentpk) | ||||
|     script = Script.objects.get(pk=scriptpk) | ||||
|     nats_data["func"] = "runscriptfull" | ||||
|     r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout)) | ||||
|     r = agent.run_script( | ||||
|         scriptpk=script.pk, args=args, full=True, timeout=nats_timeout, wait=True | ||||
|     ) | ||||
|     if r == "timeout": | ||||
|         logger.error(f"{agent.hostname} timed out running script.") | ||||
|         return | ||||
| @@ -229,18 +266,3 @@ def run_script_email_results_task( | ||||
|                 server.quit() | ||||
|     except Exception as e: | ||||
|         logger.error(e) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def remove_salt_task() -> None: | ||||
|     if hasattr(settings, "KEEP_SALT") and settings.KEEP_SALT: | ||||
|         return | ||||
|  | ||||
|     q = Agent.objects.only("pk", "version") | ||||
|     agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")] | ||||
|     chunks = (agents[i : i + 50] for i in range(0, len(agents), 50)) | ||||
|     for chunk in chunks: | ||||
|         for agent in chunk: | ||||
|             asyncio.run(agent.nats_cmd({"func": "removesalt"}, wait=False)) | ||||
|             sleep(0.1) | ||||
|         sleep(4) | ||||
|   | ||||
| @@ -1,23 +1,121 @@ | ||||
| import json | ||||
| import os | ||||
| from itertools import cycle | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from django.conf import settings | ||||
| from model_bakery import baker | ||||
| from itertools import cycle | ||||
| from typing import List | ||||
| from packaging import version as pyver | ||||
|  | ||||
|  | ||||
| from django.conf import settings | ||||
|  | ||||
| from logs.models import PendingAction | ||||
|  | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from .serializers import AgentSerializer | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
| from .models import Agent | ||||
| from .tasks import auto_self_agent_update_task | ||||
| from winupdate.models import WinUpdatePolicy | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .models import Agent | ||||
| from .serializers import AgentSerializer | ||||
| from .tasks import auto_self_agent_update_task | ||||
|  | ||||
|  | ||||
| class TestAgentsList(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     def test_agents_list(self): | ||||
|         url = "/agents/listagents/" | ||||
|  | ||||
|         # 36 total agents | ||||
|         company1 = baker.make("clients.Client") | ||||
|         company2 = baker.make("clients.Client") | ||||
|         site1 = baker.make("clients.Site", client=company1) | ||||
|         site2 = baker.make("clients.Site", client=company1) | ||||
|         site3 = baker.make("clients.Site", client=company2) | ||||
|  | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", site=site1, monitoring_type="server", _quantity=15 | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=site2, | ||||
|             monitoring_type="workstation", | ||||
|             _quantity=10, | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=site3, | ||||
|             monitoring_type="server", | ||||
|             _quantity=4, | ||||
|         ) | ||||
|         baker.make_recipe( | ||||
|             "agents.online_agent", | ||||
|             site=site3, | ||||
|             monitoring_type="workstation", | ||||
|             _quantity=7, | ||||
|         ) | ||||
|  | ||||
|         data = { | ||||
|             "pagination": { | ||||
|                 "rowsPerPage": 50, | ||||
|                 "rowsNumber": None, | ||||
|                 "sortBy": "hostname", | ||||
|                 "descending": False, | ||||
|                 "page": 1, | ||||
|             }, | ||||
|             "monType": "mixed", | ||||
|         } | ||||
|  | ||||
|         # test mixed | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["total"], 36)  # type: ignore | ||||
|         self.assertEqual(len(r.data["agents"]), 36)  # type: ignore | ||||
|  | ||||
|         # test servers | ||||
|         data["monType"] = "server" | ||||
|         data["pagination"]["rowsPerPage"] = 6 | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["total"], 19)  # type: ignore | ||||
|         self.assertEqual(len(r.data["agents"]), 6)  # type: ignore | ||||
|  | ||||
|         # test workstations | ||||
|         data["monType"] = "server" | ||||
|         data["pagination"]["rowsPerPage"] = 6 | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["total"], 19)  # type: ignore | ||||
|         self.assertEqual(len(r.data["agents"]), 6)  # type: ignore | ||||
|  | ||||
|         # test client1 mixed | ||||
|         data = { | ||||
|             "pagination": { | ||||
|                 "rowsPerPage": 3, | ||||
|                 "rowsNumber": None, | ||||
|                 "sortBy": "hostname", | ||||
|                 "descending": False, | ||||
|                 "page": 1, | ||||
|             }, | ||||
|             "monType": "mixed", | ||||
|             "clientPK": company1.pk,  # type: ignore | ||||
|         } | ||||
|  | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["total"], 25)  # type: ignore | ||||
|         self.assertEqual(len(r.data["agents"]), 3)  # type: ignore | ||||
|  | ||||
|         # test site3 workstations | ||||
|         del data["clientPK"] | ||||
|         data["monType"] = "workstation" | ||||
|         data["sitePK"] = site3.pk  # type: ignore | ||||
|  | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["total"], 7)  # type: ignore | ||||
|         self.assertEqual(len(r.data["agents"]), 3)  # type: ignore | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|  | ||||
| class TestAgentViews(TacticalTestCase): | ||||
| @@ -80,12 +178,12 @@ class TestAgentViews(TacticalTestCase): | ||||
|             _quantity=15, | ||||
|         ) | ||||
|  | ||||
|         pks: List[int] = list( | ||||
|         pks: list[int] = list( | ||||
|             Agent.objects.only("pk", "version").values_list("pk", flat=True) | ||||
|         ) | ||||
|  | ||||
|         data = {"pks": pks} | ||||
|         expected: List[int] = [ | ||||
|         expected: list[int] = [ | ||||
|             i.pk | ||||
|             for i in Agent.objects.only("pk", "version") | ||||
|             if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
| @@ -259,7 +357,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|         mock_ret.return_value = "nt authority\system" | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertIsInstance(r.data, str) | ||||
|         self.assertIsInstance(r.data, str)  # type: ignore | ||||
|  | ||||
|         mock_ret.return_value = "timeout" | ||||
|         r = self.client.post(url, data, format="json") | ||||
| @@ -279,15 +377,15 @@ class TestAgentViews(TacticalTestCase): | ||||
|         nats_cmd.return_value = "ok" | ||||
|         r = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM") | ||||
|         self.assertEqual(r.data["agent"], self.agent.hostname) | ||||
|         self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")  # type: ignore | ||||
|         self.assertEqual(r.data["agent"], self.agent.hostname)  # type: ignore | ||||
|  | ||||
|         nats_data = { | ||||
|             "func": "schedtask", | ||||
|             "schedtaskpayload": { | ||||
|                 "type": "schedreboot", | ||||
|                 "trigger": "once", | ||||
|                 "name": r.data["task_name"], | ||||
|                 "name": r.data["task_name"],  # type: ignore | ||||
|                 "year": 2025, | ||||
|                 "month": "August", | ||||
|                 "day": 29, | ||||
| @@ -308,7 +406,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|         r = self.client.patch(url, data_invalid, format="json") | ||||
|  | ||||
|         self.assertEqual(r.status_code, 400) | ||||
|         self.assertEqual(r.data, "Invalid date") | ||||
|         self.assertEqual(r.data, "Invalid date")  # type: ignore | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
| @@ -319,8 +417,8 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         site = baker.make("clients.Site") | ||||
|         data = { | ||||
|             "client": site.client.id, | ||||
|             "site": site.id, | ||||
|             "client": site.client.id,  # type: ignore | ||||
|             "site": site.id,  # type: ignore | ||||
|             "arch": "64", | ||||
|             "expires": 23, | ||||
|             "installMethod": "exe", | ||||
| @@ -359,7 +457,6 @@ class TestAgentViews(TacticalTestCase): | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertIn("rdp", r.json()["cmd"]) | ||||
|         self.assertNotIn("power", r.json()["cmd"]) | ||||
|         self.assertNotIn("ping", r.json()["cmd"]) | ||||
|  | ||||
|         data.update({"ping": 1, "power": 1}) | ||||
|         r = self.client.post(url, data, format="json") | ||||
| @@ -405,14 +502,6 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_agents_list(self): | ||||
|         url = "/agents/listagents/" | ||||
|  | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_agents_agent_detail(self): | ||||
|         url = f"/agents/{self.agent.pk}/agentdetail/" | ||||
|  | ||||
| @@ -429,9 +518,10 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         edit = { | ||||
|             "id": self.agent.pk, | ||||
|             "site": site.id, | ||||
|             "site": site.id,  # type: ignore | ||||
|             "monitoring_type": "workstation", | ||||
|             "description": "asjdk234andasd", | ||||
|             "offline_time": 4, | ||||
|             "overdue_time": 300, | ||||
|             "check_interval": 60, | ||||
|             "overdue_email_alert": True, | ||||
| @@ -459,7 +549,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         agent = Agent.objects.get(pk=self.agent.pk) | ||||
|         data = AgentSerializer(agent).data | ||||
|         self.assertEqual(data["site"], site.id) | ||||
|         self.assertEqual(data["site"], site.id)  # type: ignore | ||||
|  | ||||
|         policy = WinUpdatePolicy.objects.get(agent=self.agent) | ||||
|         data = WinUpdatePolicySerializer(policy).data | ||||
| @@ -477,21 +567,21 @@ class TestAgentViews(TacticalTestCase): | ||||
|         # TODO | ||||
|         # decode the cookie | ||||
|  | ||||
|         self.assertIn("&viewmode=13", r.data["file"]) | ||||
|         self.assertIn("&viewmode=12", r.data["terminal"]) | ||||
|         self.assertIn("&viewmode=11", r.data["control"]) | ||||
|         self.assertIn("&viewmode=13", r.data["file"])  # type: ignore | ||||
|         self.assertIn("&viewmode=12", r.data["terminal"])  # type: ignore | ||||
|         self.assertIn("&viewmode=11", r.data["control"])  # type: ignore | ||||
|  | ||||
|         self.assertIn("&gotonode=", r.data["file"]) | ||||
|         self.assertIn("&gotonode=", r.data["terminal"]) | ||||
|         self.assertIn("&gotonode=", r.data["control"]) | ||||
|         self.assertIn("&gotonode=", r.data["file"])  # type: ignore | ||||
|         self.assertIn("&gotonode=", r.data["terminal"])  # type: ignore | ||||
|         self.assertIn("&gotonode=", r.data["control"])  # type: ignore | ||||
|  | ||||
|         self.assertIn("?login=", r.data["file"]) | ||||
|         self.assertIn("?login=", r.data["terminal"]) | ||||
|         self.assertIn("?login=", r.data["control"]) | ||||
|         self.assertIn("?login=", r.data["file"])  # type: ignore | ||||
|         self.assertIn("?login=", r.data["terminal"])  # type: ignore | ||||
|         self.assertIn("?login=", r.data["control"])  # type: ignore | ||||
|  | ||||
|         self.assertEqual(self.agent.hostname, r.data["hostname"]) | ||||
|         self.assertEqual(self.agent.client.name, r.data["client"]) | ||||
|         self.assertEqual(self.agent.site.name, r.data["site"]) | ||||
|         self.assertEqual(self.agent.hostname, r.data["hostname"])  # type: ignore | ||||
|         self.assertEqual(self.agent.client.name, r.data["client"])  # type: ignore | ||||
|         self.assertEqual(self.agent.site.name, r.data["site"])  # type: ignore | ||||
|  | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|  | ||||
| @@ -501,32 +591,6 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_by_client(self): | ||||
|         url = f"/agents/byclient/{self.agent.client.id}/" | ||||
|  | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertTrue(r.data) | ||||
|  | ||||
|         url = f"/agents/byclient/500/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertFalse(r.data)  # returns empty list | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_by_site(self): | ||||
|         url = f"/agents/bysite/{self.agent.site.id}/" | ||||
|  | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertTrue(r.data) | ||||
|  | ||||
|         url = f"/agents/bysite/500/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.data, []) | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_overdue_action(self): | ||||
|         url = "/agents/overdueaction/" | ||||
|  | ||||
| @@ -535,14 +599,14 @@ class TestAgentViews(TacticalTestCase): | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         agent = Agent.objects.get(pk=self.agent.pk) | ||||
|         self.assertTrue(agent.overdue_email_alert) | ||||
|         self.assertEqual(self.agent.hostname, r.data) | ||||
|         self.assertEqual(self.agent.hostname, r.data)  # type: ignore | ||||
|  | ||||
|         payload = {"pk": self.agent.pk, "overdue_text_alert": False} | ||||
|         r = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         agent = Agent.objects.get(pk=self.agent.pk) | ||||
|         self.assertFalse(agent.overdue_text_alert) | ||||
|         self.assertEqual(self.agent.hostname, r.data) | ||||
|         self.assertEqual(self.agent.hostname, r.data)  # type: ignore | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
| @@ -686,7 +750,7 @@ class TestAgentViews(TacticalTestCase): | ||||
|         nats_cmd.return_value = "ok" | ||||
|         r = self.client.get(url) | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertIn(self.agent.hostname, r.data) | ||||
|         self.assertIn(self.agent.hostname, r.data)  # type: ignore | ||||
|         nats_cmd.assert_called_with( | ||||
|             {"func": "recover", "payload": {"mode": "mesh"}}, timeout=45 | ||||
|         ) | ||||
| @@ -701,10 +765,82 @@ class TestAgentViews(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     @patch("agents.tasks.run_script_email_results_task.delay") | ||||
|     @patch("agents.models.Agent.run_script") | ||||
|     def test_run_script(self, run_script, email_task): | ||||
|         run_script.return_value = "ok" | ||||
|         url = "/agents/runscript/" | ||||
|         script = baker.make_recipe("scripts.script") | ||||
|  | ||||
|         # test wait | ||||
|         data = { | ||||
|             "pk": self.agent.pk, | ||||
|             "scriptPK": script.pk, | ||||
|             "output": "wait", | ||||
|             "args": [], | ||||
|             "timeout": 15, | ||||
|         } | ||||
|  | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         run_script.assert_called_with( | ||||
|             scriptpk=script.pk, args=[], timeout=18, wait=True | ||||
|         ) | ||||
|         run_script.reset_mock() | ||||
|  | ||||
|         # test email default | ||||
|         data = { | ||||
|             "pk": self.agent.pk, | ||||
|             "scriptPK": script.pk, | ||||
|             "output": "email", | ||||
|             "args": ["abc", "123"], | ||||
|             "timeout": 15, | ||||
|             "emailmode": "default", | ||||
|             "emails": ["admin@example.com", "bob@example.com"], | ||||
|         } | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         email_task.assert_called_with( | ||||
|             agentpk=self.agent.pk, | ||||
|             scriptpk=script.pk, | ||||
|             nats_timeout=18, | ||||
|             emails=[], | ||||
|             args=["abc", "123"], | ||||
|         ) | ||||
|         email_task.reset_mock() | ||||
|  | ||||
|         # test email overrides | ||||
|         data["emailmode"] = "custom" | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         email_task.assert_called_with( | ||||
|             agentpk=self.agent.pk, | ||||
|             scriptpk=script.pk, | ||||
|             nats_timeout=18, | ||||
|             emails=["admin@example.com", "bob@example.com"], | ||||
|             args=["abc", "123"], | ||||
|         ) | ||||
|  | ||||
|         # test fire and forget | ||||
|         data = { | ||||
|             "pk": self.agent.pk, | ||||
|             "scriptPK": script.pk, | ||||
|             "output": "forget", | ||||
|             "args": ["hello", "world"], | ||||
|             "timeout": 22, | ||||
|         } | ||||
|  | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         run_script.assert_called_with( | ||||
|             scriptpk=script.pk, args=["hello", "world"], timeout=25 | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class TestAgentViewsNew(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|  | ||||
|     def test_agent_counts(self): | ||||
|         url = "/agents/agent_counts/" | ||||
| @@ -715,15 +851,12 @@ class TestAgentViewsNew(TacticalTestCase): | ||||
|             monitoring_type=cycle(["server", "workstation"]), | ||||
|             _quantity=6, | ||||
|         ) | ||||
|         agents = baker.make_recipe( | ||||
|         baker.make_recipe( | ||||
|             "agents.overdue_agent", | ||||
|             monitoring_type=cycle(["server", "workstation"]), | ||||
|             _quantity=6, | ||||
|         ) | ||||
|  | ||||
|         # make an AgentOutage for every overdue agent | ||||
|         baker.make("agents.AgentOutage", agent=cycle(agents), _quantity=6) | ||||
|  | ||||
|         # returned data should be this | ||||
|         data = { | ||||
|             "total_server_count": 6, | ||||
| @@ -734,7 +867,7 @@ class TestAgentViewsNew(TacticalTestCase): | ||||
|  | ||||
|         r = self.client.post(url, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertEqual(r.data, data) | ||||
|         self.assertEqual(r.data, data)  # type: ignore | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
| @@ -746,14 +879,14 @@ class TestAgentViewsNew(TacticalTestCase): | ||||
|         agent = baker.make_recipe("agents.agent", site=site) | ||||
|  | ||||
|         # Test client toggle maintenance mode | ||||
|         data = {"type": "Client", "id": site.client.id, "action": True} | ||||
|         data = {"type": "Client", "id": site.client.id, "action": True}  # type: ignore | ||||
|  | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         self.assertTrue(Agent.objects.get(pk=agent.pk).maintenance_mode) | ||||
|  | ||||
|         # Test site toggle maintenance mode | ||||
|         data = {"type": "Site", "id": site.id, "action": False} | ||||
|         data = {"type": "Site", "id": site.id, "action": False}  # type: ignore | ||||
|  | ||||
|         r = self.client.post(url, data, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|   | ||||
| @@ -1,12 +1,11 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
|     path("listagents/", views.AgentsTableList.as_view()), | ||||
|     path("listagentsnodetail/", views.list_agents_no_detail), | ||||
|     path("<int:pk>/agenteditdetails/", views.agent_edit_details), | ||||
|     path("byclient/<int:clientpk>/", views.by_client), | ||||
|     path("bysite/<int:sitepk>/", views.by_site), | ||||
|     path("overdueaction/", views.overdue_action), | ||||
|     path("sendrawcmd/", views.send_raw_cmd), | ||||
|     path("<pk>/agentdetail/", views.agent_detail), | ||||
|   | ||||
| @@ -1,47 +1,45 @@ | ||||
| import asyncio | ||||
| from loguru import logger | ||||
| import datetime as dt | ||||
| import os | ||||
| import subprocess | ||||
| import pytz | ||||
| import random | ||||
| import string | ||||
| import datetime as dt | ||||
| from packaging import version as pyver | ||||
| from typing import List | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.core.paginator import Paginator | ||||
| from django.db.models import Q | ||||
| from django.http import HttpResponse | ||||
|  | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from loguru import logger | ||||
| from packaging import version as pyver | ||||
| from rest_framework import status | ||||
| from rest_framework.decorators import api_view | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.response import Response | ||||
| from rest_framework import status, generics | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from .models import Agent, AgentOutage, RecoveryAction, Note | ||||
| from core.models import CoreSettings | ||||
| from scripts.models import Script | ||||
| from logs.models import AuditLog, PendingAction | ||||
|  | ||||
| from .serializers import ( | ||||
|     AgentSerializer, | ||||
|     AgentHostnameSerializer, | ||||
|     AgentTableSerializer, | ||||
|     AgentEditSerializer, | ||||
|     NoteSerializer, | ||||
|     NotesSerializer, | ||||
|     AgentOverdueActionSerializer, | ||||
| from scripts.models import Script | ||||
| from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task | ||||
| from tacticalrmm.utils import ( | ||||
|     generate_installer_exe, | ||||
|     get_default_timezone, | ||||
|     notify_error, | ||||
|     reload_nats, | ||||
| ) | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .tasks import ( | ||||
|     send_agent_update_task, | ||||
|     run_script_email_results_task, | ||||
| ) | ||||
| from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task | ||||
| from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task | ||||
|  | ||||
| from tacticalrmm.utils import notify_error, reload_nats | ||||
| from .models import Agent, Note, RecoveryAction | ||||
| from .serializers import ( | ||||
|     AgentEditSerializer, | ||||
|     AgentHostnameSerializer, | ||||
|     AgentOverdueActionSerializer, | ||||
|     AgentSerializer, | ||||
|     AgentTableSerializer, | ||||
|     NoteSerializer, | ||||
|     NotesSerializer, | ||||
| ) | ||||
| from .tasks import run_script_email_results_task, send_agent_update_task | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| @@ -60,7 +58,7 @@ def get_agent_versions(request): | ||||
| @api_view(["POST"]) | ||||
| def update_agents(request): | ||||
|     q = Agent.objects.filter(pk__in=request.data["pks"]).only("pk", "version") | ||||
|     pks: List[int] = [ | ||||
|     pks: list[int] = [ | ||||
|         i.pk | ||||
|         for i in q | ||||
|         if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER) | ||||
| @@ -97,22 +95,17 @@ def uninstall(request): | ||||
| def edit_agent(request): | ||||
|     agent = get_object_or_404(Agent, pk=request.data["id"]) | ||||
|  | ||||
|     old_site = agent.site.pk | ||||
|     a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True) | ||||
|     a_serializer.is_valid(raise_exception=True) | ||||
|     a_serializer.save() | ||||
|  | ||||
|     policy = agent.winupdatepolicy.get() | ||||
|     p_serializer = WinUpdatePolicySerializer( | ||||
|         instance=policy, data=request.data["winupdatepolicy"][0] | ||||
|     ) | ||||
|     p_serializer.is_valid(raise_exception=True) | ||||
|     p_serializer.save() | ||||
|  | ||||
|     # check if site changed and initiate generating correct policies | ||||
|     if old_site != request.data["site"]: | ||||
|         agent.generate_checks_from_policies() | ||||
|         agent.generate_tasks_from_policies() | ||||
|     if "winupdatepolicy" in request.data.keys(): | ||||
|         policy = agent.winupdatepolicy.get()  # type: ignore | ||||
|         p_serializer = WinUpdatePolicySerializer( | ||||
|             instance=policy, data=request.data["winupdatepolicy"][0] | ||||
|         ) | ||||
|         p_serializer.is_valid(raise_exception=True) | ||||
|         p_serializer.save() | ||||
|  | ||||
|     return Response("ok") | ||||
|  | ||||
| @@ -233,38 +226,74 @@ def send_raw_cmd(request): | ||||
|     return Response(r) | ||||
|  | ||||
|  | ||||
| class AgentsTableList(generics.ListAPIView): | ||||
|     queryset = ( | ||||
|         Agent.objects.select_related("site") | ||||
|         .prefetch_related("agentchecks") | ||||
|         .only( | ||||
|             "pk", | ||||
|             "hostname", | ||||
|             "agent_id", | ||||
|             "site", | ||||
|             "monitoring_type", | ||||
|             "description", | ||||
|             "needs_reboot", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_time", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "logged_in_username", | ||||
|             "last_logged_in_user", | ||||
|             "time_zone", | ||||
|             "maintenance_mode", | ||||
|         ) | ||||
|     ) | ||||
|     serializer_class = AgentTableSerializer | ||||
| class AgentsTableList(APIView): | ||||
|     def patch(self, request): | ||||
|         pagination = request.data["pagination"] | ||||
|         monType = request.data["monType"] | ||||
|         client = Q() | ||||
|         site = Q() | ||||
|         mon_type = Q() | ||||
|  | ||||
|     def list(self, request): | ||||
|         queryset = self.get_queryset() | ||||
|         ctx = { | ||||
|             "default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone) | ||||
|         } | ||||
|         serializer = AgentTableSerializer(queryset, many=True, context=ctx) | ||||
|         return Response(serializer.data) | ||||
|         if pagination["sortBy"] == "agentstatus": | ||||
|             sort = "last_seen" | ||||
|         elif pagination["sortBy"] == "client_name": | ||||
|             sort = "site__client__name" | ||||
|         elif pagination["sortBy"] == "site_name": | ||||
|             sort = "site__name" | ||||
|         elif pagination["sortBy"] == "user": | ||||
|             sort = "logged_in_username" | ||||
|         else: | ||||
|             sort = pagination["sortBy"] | ||||
|  | ||||
|         order_by = f"-{sort}" if pagination["descending"] else sort | ||||
|  | ||||
|         if monType == "server": | ||||
|             mon_type = Q(monitoring_type="server") | ||||
|         elif monType == "workstation": | ||||
|             mon_type = Q(monitoring_type="workstation") | ||||
|  | ||||
|         if "clientPK" in request.data: | ||||
|             client = Q(site__client_id=request.data["clientPK"]) | ||||
|  | ||||
|         if "sitePK" in request.data: | ||||
|             site = Q(site_id=request.data["sitePK"]) | ||||
|  | ||||
|         queryset = ( | ||||
|             Agent.objects.select_related("site") | ||||
|             .prefetch_related("agentchecks") | ||||
|             .filter(mon_type) | ||||
|             .filter(client) | ||||
|             .filter(site) | ||||
|             .only( | ||||
|                 "pk", | ||||
|                 "hostname", | ||||
|                 "agent_id", | ||||
|                 "site", | ||||
|                 "monitoring_type", | ||||
|                 "description", | ||||
|                 "needs_reboot", | ||||
|                 "overdue_text_alert", | ||||
|                 "overdue_email_alert", | ||||
|                 "overdue_time", | ||||
|                 "offline_time", | ||||
|                 "last_seen", | ||||
|                 "boot_time", | ||||
|                 "logged_in_username", | ||||
|                 "last_logged_in_user", | ||||
|                 "time_zone", | ||||
|                 "maintenance_mode", | ||||
|             ) | ||||
|             .order_by(order_by) | ||||
|         ) | ||||
|         paginator = Paginator(queryset, pagination["rowsPerPage"]) | ||||
|  | ||||
|         ctx = {"default_tz": get_default_timezone()} | ||||
|         serializer = AgentTableSerializer( | ||||
|             paginator.get_page(pagination["page"]), many=True, context=ctx | ||||
|         ) | ||||
|  | ||||
|         ret = {"agents": serializer.data, "total": paginator.count} | ||||
|         return Response(ret) | ||||
|  | ||||
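The tests earlier in this changeset show the request body this endpoint expects; a minimal client-side example (all values are placeholders):

    # PATCH /agents/listagents/ payload, as exercised by the tests above.
    payload = {
        "pagination": {
            "rowsPerPage": 50,
            "rowsNumber": None,
            "sortBy": "hostname",  # or "client_name", "site_name", "user", "agentstatus"
            "descending": False,
            "page": 1,
        },
        "monType": "server",  # "mixed", "server", or "workstation"
        "clientPK": 1,  # optional; "sitePK" may be supplied instead
    }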
|  | ||||
| @api_view() | ||||
| @@ -279,64 +308,6 @@ def agent_edit_details(request, pk): | ||||
|     return Response(AgentEditSerializer(agent).data) | ||||
|  | ||||
|  | ||||
| @api_view() | ||||
| def by_client(request, clientpk): | ||||
|     agents = ( | ||||
|         Agent.objects.select_related("site") | ||||
|         .filter(site__client_id=clientpk) | ||||
|         .prefetch_related("agentchecks") | ||||
|         .only( | ||||
|             "pk", | ||||
|             "hostname", | ||||
|             "agent_id", | ||||
|             "site", | ||||
|             "monitoring_type", | ||||
|             "description", | ||||
|             "needs_reboot", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_time", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "logged_in_username", | ||||
|             "last_logged_in_user", | ||||
|             "time_zone", | ||||
|             "maintenance_mode", | ||||
|         ) | ||||
|     ) | ||||
|     ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)} | ||||
|     return Response(AgentTableSerializer(agents, many=True, context=ctx).data) | ||||
|  | ||||
|  | ||||
| @api_view() | ||||
| def by_site(request, sitepk): | ||||
|     agents = ( | ||||
|         Agent.objects.filter(site_id=sitepk) | ||||
|         .select_related("site") | ||||
|         .prefetch_related("agentchecks") | ||||
|         .only( | ||||
|             "pk", | ||||
|             "hostname", | ||||
|             "agent_id", | ||||
|             "site", | ||||
|             "monitoring_type", | ||||
|             "description", | ||||
|             "needs_reboot", | ||||
|             "overdue_text_alert", | ||||
|             "overdue_email_alert", | ||||
|             "overdue_time", | ||||
|             "last_seen", | ||||
|             "boot_time", | ||||
|             "logged_in_username", | ||||
|             "last_logged_in_user", | ||||
|             "time_zone", | ||||
|             "maintenance_mode", | ||||
|         ) | ||||
|     ) | ||||
|     ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)} | ||||
|     return Response(AgentTableSerializer(agents, many=True, context=ctx).data) | ||||
|  | ||||
|  | ||||
| @api_view(["POST"]) | ||||
| def overdue_action(request): | ||||
|     agent = get_object_or_404(Agent, pk=request.data["pk"]) | ||||
| @@ -438,124 +409,20 @@ def install_agent(request): | ||||
|     ) | ||||
|  | ||||
|     if request.data["installMethod"] == "exe": | ||||
|         go_bin = "/usr/local/rmmgo/go/bin/go" | ||||
|  | ||||
|         if not os.path.exists(go_bin): | ||||
|             return Response("nogolang", status=status.HTTP_409_CONFLICT) | ||||
|  | ||||
|         api = request.data["api"] | ||||
|         atype = request.data["agenttype"] | ||||
|         rdp = request.data["rdp"] | ||||
|         ping = request.data["ping"] | ||||
|         power = request.data["power"] | ||||
|  | ||||
|         file_name = "rmm-installer.exe" | ||||
|         exe = os.path.join(settings.EXE_DIR, file_name) | ||||
|  | ||||
|         if os.path.exists(exe): | ||||
|             try: | ||||
|                 os.remove(exe) | ||||
|             except Exception as e: | ||||
|                 logger.error(str(e)) | ||||
|  | ||||
|         goarch = "amd64" if arch == "64" else "386" | ||||
|         cmd = [ | ||||
|             "env", | ||||
|             "GOOS=windows", | ||||
|             f"GOARCH={goarch}", | ||||
|             go_bin, | ||||
|             "build", | ||||
|             f"-ldflags=\"-s -w -X 'main.Inno={inno}'", | ||||
|             f"-X 'main.Api={api}'", | ||||
|             f"-X 'main.Client={client_id}'", | ||||
|             f"-X 'main.Site={site_id}'", | ||||
|             f"-X 'main.Atype={atype}'", | ||||
|             f"-X 'main.Rdp={rdp}'", | ||||
|             f"-X 'main.Ping={ping}'", | ||||
|             f"-X 'main.Power={power}'", | ||||
|             f"-X 'main.DownloadUrl={download_url}'", | ||||
|             f"-X 'main.Token={token}'\"", | ||||
|             "-o", | ||||
|             exe, | ||||
|         ] | ||||
|  | ||||
|         build_error = False | ||||
|         gen_error = False | ||||
|  | ||||
|         gen = [ | ||||
|             "env", | ||||
|             "GOOS=windows", | ||||
|             f"GOARCH={goarch}", | ||||
|             go_bin, | ||||
|             "generate", | ||||
|         ] | ||||
|         try: | ||||
|             r1 = subprocess.run( | ||||
|                 " ".join(gen), | ||||
|                 capture_output=True, | ||||
|                 shell=True, | ||||
|                 cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"), | ||||
|             ) | ||||
|         except Exception as e: | ||||
|             gen_error = True | ||||
|             logger.error(str(e)) | ||||
|             return Response( | ||||
|                 "genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE | ||||
|             ) | ||||
|  | ||||
|         if r1.returncode != 0: | ||||
|             gen_error = True | ||||
|             if r1.stdout: | ||||
|                 logger.error(r1.stdout.decode("utf-8", errors="ignore")) | ||||
|  | ||||
|             if r1.stderr: | ||||
|                 logger.error(r1.stderr.decode("utf-8", errors="ignore")) | ||||
|  | ||||
|             logger.error(f"Go build failed with return code {r1.returncode}") | ||||
|  | ||||
|         if gen_error: | ||||
|             return Response( | ||||
|                 "genfailed", status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE | ||||
|             ) | ||||
|  | ||||
|         try: | ||||
|             r = subprocess.run( | ||||
|                 " ".join(cmd), | ||||
|                 capture_output=True, | ||||
|                 shell=True, | ||||
|                 cwd=os.path.join(settings.BASE_DIR, "core/goinstaller"), | ||||
|             ) | ||||
|         except Exception as e: | ||||
|             build_error = True | ||||
|             logger.error(str(e)) | ||||
|             return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED) | ||||
|  | ||||
|         if r.returncode != 0: | ||||
|             build_error = True | ||||
|             if r.stdout: | ||||
|                 logger.error(r.stdout.decode("utf-8", errors="ignore")) | ||||
|  | ||||
|             if r.stderr: | ||||
|                 logger.error(r.stderr.decode("utf-8", errors="ignore")) | ||||
|  | ||||
|             logger.error(f"Go build failed with return code {r.returncode}") | ||||
|  | ||||
|         if build_error: | ||||
|             return Response("buildfailed", status=status.HTTP_412_PRECONDITION_FAILED) | ||||
|  | ||||
|         if settings.DEBUG: | ||||
|             with open(exe, "rb") as f: | ||||
|                 response = HttpResponse( | ||||
|                     f.read(), | ||||
|                     content_type="application/vnd.microsoft.portable-executable", | ||||
|                 ) | ||||
|                 response["Content-Disposition"] = f"inline; filename={file_name}" | ||||
|                 return response | ||||
|         else: | ||||
|             response = HttpResponse() | ||||
|             response["Content-Disposition"] = f"attachment; filename={file_name}" | ||||
|             response["X-Accel-Redirect"] = f"/private/exe/{file_name}" | ||||
|             return response | ||||
|         return generate_installer_exe( | ||||
|             file_name="rmm-installer.exe", | ||||
|             goarch="amd64" if arch == "64" else "386", | ||||
|             inno=inno, | ||||
|             api=request.data["api"], | ||||
|             client_id=client_id, | ||||
|             site_id=site_id, | ||||
|             atype=request.data["agenttype"], | ||||
|             rdp=request.data["rdp"], | ||||
|             ping=request.data["ping"], | ||||
|             power=request.data["power"], | ||||
|             download_url=download_url, | ||||
|             token=token, | ||||
|         ) | ||||
|  | ||||
|     elif request.data["installMethod"] == "manual": | ||||
|         cmd = [ | ||||
| @@ -563,12 +430,10 @@ def install_agent(request): | ||||
|             "/VERYSILENT", | ||||
|             "/SUPPRESSMSGBOXES", | ||||
|             "&&", | ||||
|             "timeout", | ||||
|             "/t", | ||||
|             "10", | ||||
|             "/nobreak", | ||||
|             ">", | ||||
|             "NUL", | ||||
|             "ping", | ||||
|             "127.0.0.1", | ||||
|             "-n", | ||||
|             "5", | ||||
|             "&&", | ||||
|             r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"', | ||||
|             "-m", | ||||
| @@ -666,7 +531,7 @@ def recover(request): | ||||
|             if r == "ok": | ||||
|                 return Response("Successfully completed recovery") | ||||
|  | ||||
|     if agent.recoveryactions.filter(last_run=None).exists(): | ||||
|     if agent.recoveryactions.filter(last_run=None).exists():  # type: ignore | ||||
|         return notify_error( | ||||
|             "A recovery action is currently pending. Please wait for the next agent check-in." | ||||
|         ) | ||||
| @@ -694,10 +559,9 @@ def recover(request): | ||||
| @api_view(["POST"]) | ||||
| def run_script(request): | ||||
|     agent = get_object_or_404(Agent, pk=request.data["pk"]) | ||||
|     if not agent.has_nats: | ||||
|         return notify_error("Requires agent version 1.1.0 or greater") | ||||
|     script = get_object_or_404(Script, pk=request.data["scriptPK"]) | ||||
|     output = request.data["output"] | ||||
|     args = request.data["args"] | ||||
|     req_timeout = int(request.data["timeout"]) + 3 | ||||
|  | ||||
|     AuditLog.audit_script_run( | ||||
| @@ -706,23 +570,13 @@ def run_script(request): | ||||
|         script=script.name, | ||||
|     ) | ||||
|  | ||||
|     data = { | ||||
|         "func": "runscript", | ||||
|         "timeout": request.data["timeout"], | ||||
|         "script_args": request.data["args"], | ||||
|         "payload": { | ||||
|             "code": script.code, | ||||
|             "shell": script.shell, | ||||
|         }, | ||||
|     } | ||||
|  | ||||
|     if output == "wait": | ||||
|         r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout)) | ||||
|         r = agent.run_script( | ||||
|             scriptpk=script.pk, args=args, timeout=req_timeout, wait=True | ||||
|         ) | ||||
|         return Response(r) | ||||
|     elif output == "email": | ||||
|         if not pyver.parse(agent.version) >= pyver.parse("1.1.12"): | ||||
|             return notify_error("Requires agent version 1.1.12 or greater") | ||||
|  | ||||
|     elif output == "email": | ||||
|         emails = ( | ||||
|             [] if request.data["emailmode"] == "default" else request.data["emails"] | ||||
|         ) | ||||
| @@ -730,13 +584,13 @@ def run_script(request): | ||||
|             agentpk=agent.pk, | ||||
|             scriptpk=script.pk, | ||||
|             nats_timeout=req_timeout, | ||||
|             nats_data=data, | ||||
|             emails=emails, | ||||
|             args=args, | ||||
|         ) | ||||
|         return Response(f"{script.name} will now be run on {agent.hostname}") | ||||
|     else: | ||||
|         asyncio.run(agent.nats_cmd(data, wait=False)) | ||||
|         return Response(f"{script.name} will now be run on {agent.hostname}") | ||||
|         agent.run_script(scriptpk=script.pk, args=args, timeout=req_timeout) | ||||
|  | ||||
|     return Response(f"{script.name} will now be run on {agent.hostname}") | ||||
|  | ||||
|  | ||||
| @api_view() | ||||
| @@ -826,7 +680,7 @@ def bulk(request): | ||||
|     elif request.data["monType"] == "workstations": | ||||
|         q = q.filter(monitoring_type="workstation") | ||||
|  | ||||
|     agents: List[int] = [agent.pk for agent in q] | ||||
|     agents: list[int] = [agent.pk for agent in q] | ||||
|  | ||||
|     AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data) | ||||
|  | ||||
| @@ -857,20 +711,43 @@ def bulk(request): | ||||
|  | ||||
| @api_view(["POST"]) | ||||
| def agent_counts(request): | ||||
|  | ||||
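|     # an agent counts as offline when its computed status is anything other than "online" | ||||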
|     server_offline_count = len( | ||||
|         [ | ||||
|             agent | ||||
|             for agent in Agent.objects.filter(monitoring_type="server").only( | ||||
|                 "pk", | ||||
|                 "last_seen", | ||||
|                 "overdue_time", | ||||
|                 "offline_time", | ||||
|             ) | ||||
|             if not agent.status == "online" | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|     workstation_offline_count = len( | ||||
|         [ | ||||
|             agent | ||||
|             for agent in Agent.objects.filter(monitoring_type="workstation").only( | ||||
|                 "pk", | ||||
|                 "last_seen", | ||||
|                 "overdue_time", | ||||
|                 "offline_time", | ||||
|             ) | ||||
|             if not agent.status == "online" | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|     return Response( | ||||
|         { | ||||
|             "total_server_count": Agent.objects.filter( | ||||
|                 monitoring_type="server" | ||||
|             ).count(), | ||||
|             "total_server_offline_count": AgentOutage.objects.filter( | ||||
|                 recovery_time=None, agent__monitoring_type="server" | ||||
|             ).count(), | ||||
|             "total_server_offline_count": server_offline_count, | ||||
|             "total_workstation_count": Agent.objects.filter( | ||||
|                 monitoring_type="workstation" | ||||
|             ).count(), | ||||
|             "total_workstation_offline_count": AgentOutage.objects.filter( | ||||
|                 recovery_time=None, agent__monitoring_type="workstation" | ||||
|             ).count(), | ||||
|             "total_workstation_offline_count": workstation_offline_count, | ||||
|         } | ||||
|     ) | ||||
|  | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| from django.contrib import admin | ||||
|  | ||||
| from .models import Alert | ||||
|  | ||||
| from .models import Alert, AlertTemplate | ||||
|  | ||||
| admin.site.register(Alert) | ||||
| admin.site.register(AlertTemplate) | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.1 on 2020-08-15 15:31 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
| @@ -42,4 +42,4 @@ class Migration(migrations.Migration): | ||||
|                 ), | ||||
|             ], | ||||
|         ), | ||||
|     ] | ||||
|     ] | ||||
| @@ -27,4 +27,4 @@ class Migration(migrations.Migration): | ||||
|                 max_length=100, | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
|     ] | ||||
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.1.2 on 2020-10-21 18:15 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
| @@ -28,4 +28,4 @@ class Migration(migrations.Migration): | ||||
|             name="alert_time", | ||||
|             field=models.DateTimeField(auto_now_add=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/alerts/migrations/0004_auto_20210212_1408.py (new file, 172 lines)
| @@ -0,0 +1,172 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 14:08 | ||||
|  | ||||
| import django.contrib.postgres.fields | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('agents', '0029_delete_agentoutage'), | ||||
|         ('clients', '0008_auto_20201103_1430'), | ||||
|         ('autotasks', '0017_auto_20210210_1512'), | ||||
|         ('scripts', '0005_auto_20201207_1606'), | ||||
|         ('alerts', '0003_auto_20201021_1815'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_execution_time', | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_retcode', | ||||
|             field=models.IntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_run', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_stderr', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_stdout', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='action_timeout', | ||||
|             field=models.PositiveIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='alert_type', | ||||
|             field=models.CharField(choices=[('availability', 'Availability'), ('check', 'Check'), ('task', 'Task'), ('custom', 'Custom')], default='availability', max_length=20), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='assigned_task', | ||||
|             field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='autotasks.automatedtask'), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='hidden', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_execution_time', | ||||
|             field=models.CharField(blank=True, max_length=100, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_retcode', | ||||
|             field=models.IntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_run', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_stderr', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_stdout', | ||||
|             field=models.TextField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_timeout', | ||||
|             field=models.PositiveIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_on', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='resolved_sms_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='sms_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alert', | ||||
|             name='snoozed', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alert', | ||||
|             name='severity', | ||||
|             field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name='AlertTemplate', | ||||
|             fields=[ | ||||
|                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), | ||||
|                 ('name', models.CharField(max_length=100)), | ||||
|                 ('is_active', models.BooleanField(default=True)), | ||||
|                 ('action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)), | ||||
|                 ('resolved_action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)), | ||||
|                 ('email_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)), | ||||
|                 ('text_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)), | ||||
|                 ('email_from', models.EmailField(blank=True, max_length=254, null=True)), | ||||
|                 ('agent_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_include_desktops', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_always_email', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_always_text', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_always_alert', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('agent_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)), | ||||
|                 ('check_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('check_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('check_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('check_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_always_email', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_always_text', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_always_alert', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('check_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)), | ||||
|                 ('task_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('task_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('task_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)), | ||||
|                 ('task_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_always_email', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_always_text', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_always_alert', models.BooleanField(blank=True, default=False, null=True)), | ||||
|                 ('task_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)), | ||||
|                 ('action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alert_template', to='scripts.script')), | ||||
|                 ('excluded_agents', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='agents.Agent')), | ||||
|                 ('excluded_clients', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Client')), | ||||
|                 ('excluded_sites', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Site')), | ||||
|                 ('resolved_action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_alert_template', to='scripts.script')), | ||||
|             ], | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/alerts/migrations/0005_auto_20210212_1745.py (new file, 31 lines)
| @@ -0,0 +1,31 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 17:45 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('alerts', '0004_auto_20210212_1408'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='alert', | ||||
|             name='action_timeout', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='alert', | ||||
|             name='resolved_action_timeout', | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alerttemplate', | ||||
|             name='action_timeout', | ||||
|             field=models.PositiveIntegerField(default=15), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alerttemplate', | ||||
|             name='resolved_action_timeout', | ||||
|             field=models.PositiveIntegerField(default=15), | ||||
|         ), | ||||
|     ] | ||||
							
								
								
									
api/tacticalrmm/alerts/migrations/0006_auto_20210217_1736.py (new file, 72 lines)
| @@ -0,0 +1,72 @@ | ||||
| # Generated by Django 3.1.6 on 2021-02-17 17:36 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('alerts', '0005_auto_20210212_1745'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='alerttemplate', | ||||
|             name='agent_include_desktops', | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alerttemplate', | ||||
|             name='exclude_servers', | ||||
|             field=models.BooleanField(blank=True, default=False, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='alerttemplate', | ||||
|             name='exclude_workstations', | ||||
|             field=models.BooleanField(blank=True, default=False, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='agent_always_alert', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='agent_always_email', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='agent_always_text', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='check_always_alert', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='check_always_email', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='check_always_text', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='task_always_alert', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='task_always_email', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='alerttemplate', | ||||
|             name='task_always_text', | ||||
|             field=models.BooleanField(blank=True, default=None, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,5 +1,20 @@ | ||||
| from django.db import models | ||||
| from __future__ import annotations | ||||
|  | ||||
| from typing import TYPE_CHECKING, Union | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db import models | ||||
| from django.db.models.fields import BooleanField, PositiveIntegerField | ||||
| from django.utils import timezone as djangotime | ||||
| from loguru import logger | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from agents.models import Agent | ||||
|     from autotasks.models import AutomatedTask | ||||
|     from checks.models import Check | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| SEVERITY_CHOICES = [ | ||||
|     ("info", "Informational"), | ||||
| @@ -7,6 +22,13 @@ SEVERITY_CHOICES = [ | ||||
|     ("error", "Error"), | ||||
| ] | ||||
|  | ||||
| ALERT_TYPE_CHOICES = [ | ||||
|     ("availability", "Availability"), | ||||
|     ("check", "Check"), | ||||
|     ("task", "Task"), | ||||
|     ("custom", "Custom"), | ||||
| ] | ||||
|  | ||||
|  | ||||
| class Alert(models.Model): | ||||
|     agent = models.ForeignKey( | ||||
| @@ -23,21 +45,554 @@ class Alert(models.Model): | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|     assigned_task = models.ForeignKey( | ||||
|         "autotasks.AutomatedTask", | ||||
|         related_name="alert", | ||||
|         on_delete=models.CASCADE, | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|     alert_type = models.CharField( | ||||
|         max_length=20, choices=ALERT_TYPE_CHOICES, default="availability" | ||||
|     ) | ||||
|     message = models.TextField(null=True, blank=True) | ||||
|     alert_time = models.DateTimeField(auto_now_add=True, null=True) | ||||
|     alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True) | ||||
|     snoozed = models.BooleanField(default=False) | ||||
|     snooze_until = models.DateTimeField(null=True, blank=True) | ||||
|     resolved = models.BooleanField(default=False) | ||||
|     severity = models.CharField( | ||||
|         max_length=100, choices=SEVERITY_CHOICES, default="info" | ||||
|     ) | ||||
|     resolved_on = models.DateTimeField(null=True, blank=True) | ||||
|     severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info") | ||||
|     email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     sms_sent = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_sms_sent = models.DateTimeField(null=True, blank=True) | ||||
|     hidden = models.BooleanField(default=False) | ||||
|     action_run = models.DateTimeField(null=True, blank=True) | ||||
|     action_stdout = models.TextField(null=True, blank=True) | ||||
|     action_stderr = models.TextField(null=True, blank=True) | ||||
|     action_retcode = models.IntegerField(null=True, blank=True) | ||||
|     action_execution_time = models.CharField(max_length=100, null=True, blank=True) | ||||
|     resolved_action_run = models.DateTimeField(null=True, blank=True) | ||||
|     resolved_action_stdout = models.TextField(null=True, blank=True) | ||||
|     resolved_action_stderr = models.TextField(null=True, blank=True) | ||||
|     resolved_action_retcode = models.IntegerField(null=True, blank=True) | ||||
|     resolved_action_execution_time = models.CharField( | ||||
|         max_length=100, null=True, blank=True | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.message | ||||
|  | ||||
|     @classmethod | ||||
|     def create_availability_alert(cls, agent): | ||||
|         pass | ||||
|     def resolve(self): | ||||
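|         # mark the alert resolved and clear any snooze state | ||||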
|         self.resolved = True | ||||
|         self.resolved_on = djangotime.now() | ||||
|         self.snoozed = False | ||||
|         self.snooze_until = None | ||||
|         self.save() | ||||
|  | ||||
|     @classmethod | ||||
|     def create_check_alert(cls, check): | ||||
|         pass | ||||
|     def create_or_return_availability_alert(cls, agent): | ||||
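|         # reuse the existing unresolved availability alert for this agent, or create a new hidden one | ||||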
|         if not cls.objects.filter(agent=agent, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 agent=agent, | ||||
|                 alert_type="availability", | ||||
|                 severity="error", | ||||
|                 message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.", | ||||
|                 hidden=True, | ||||
|             ) | ||||
|         else: | ||||
|             return cls.objects.get(agent=agent, resolved=False) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_or_return_check_alert(cls, check): | ||||
|  | ||||
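|         # reuse the existing unresolved alert for this check, or create a new hidden one | ||||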
|         if not cls.objects.filter(assigned_check=check, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 assigned_check=check, | ||||
|                 alert_type="check", | ||||
|                 severity=check.alert_severity, | ||||
|                 message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.", | ||||
|                 hidden=True, | ||||
|             ) | ||||
|         else: | ||||
|             return cls.objects.get(assigned_check=check, resolved=False) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_or_return_task_alert(cls, task): | ||||
|  | ||||
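|         # reuse the existing unresolved alert for this task, or create a new hidden one | ||||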
|         if not cls.objects.filter(assigned_task=task, resolved=False).exists(): | ||||
|             return cls.objects.create( | ||||
|                 assigned_task=task, | ||||
|                 alert_type="task", | ||||
|                 severity=task.alert_severity, | ||||
|                 message=f"{task.agent.hostname} has task: {task.name} that failed.", | ||||
|                 hidden=True, | ||||
|             ) | ||||
|         else: | ||||
|             return cls.objects.get(assigned_task=task, resolved=False) | ||||
|  | ||||
|     @classmethod | ||||
|     def handle_alert_failure(cls, instance: Union[Agent, AutomatedTask, Check]) -> None: | ||||
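|         # send dashboard/email/text notifications and run any failure action script for the failing agent, check or task | ||||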
|         from agents.models import Agent | ||||
|         from autotasks.models import AutomatedTask | ||||
|         from checks.models import Check | ||||
|  | ||||
|         # set variables | ||||
|         dashboard_severities = None | ||||
|         email_severities = None | ||||
|         text_severities = None | ||||
|         always_dashboard = None | ||||
|         always_email = None | ||||
|         always_text = None | ||||
|         alert_interval = None | ||||
|         email_task = None | ||||
|         text_task = None | ||||
|  | ||||
|         # check what type of instance was passed | ||||
|         if isinstance(instance, Agent): | ||||
|             from agents.tasks import agent_outage_email_task, agent_outage_sms_task | ||||
|  | ||||
|             email_task = agent_outage_email_task | ||||
|             text_task = agent_outage_sms_task | ||||
|  | ||||
|             email_alert = instance.overdue_email_alert | ||||
|             text_alert = instance.overdue_text_alert | ||||
|             dashboard_alert = instance.overdue_dashboard_alert | ||||
|             alert_template = instance.get_alert_template() | ||||
|             maintenance_mode = instance.maintenance_mode | ||||
|             alert_severity = "error" | ||||
|             agent = instance | ||||
|  | ||||
|             # set alert_template settings | ||||
|             if alert_template: | ||||
|                 dashboard_severities = ["error"] | ||||
|                 email_severities = ["error"] | ||||
|                 text_severities = ["error"] | ||||
|                 always_dashboard = alert_template.agent_always_alert | ||||
|                 always_email = alert_template.agent_always_email | ||||
|                 always_text = alert_template.agent_always_text | ||||
|                 alert_interval = alert_template.agent_periodic_alert_days | ||||
|  | ||||
|             if instance.should_create_alert(alert_template): | ||||
|                 alert = cls.create_or_return_availability_alert(instance) | ||||
|             else: | ||||
|                 # check if there is an alert that exists | ||||
|                 if cls.objects.filter(agent=instance, resolved=False).exists(): | ||||
|                     alert = cls.objects.get(agent=instance, resolved=False) | ||||
|                 else: | ||||
|                     alert = None | ||||
|  | ||||
|         elif isinstance(instance, Check): | ||||
|             from checks.tasks import ( | ||||
|                 handle_check_email_alert_task, | ||||
|                 handle_check_sms_alert_task, | ||||
|             ) | ||||
|  | ||||
|             email_task = handle_check_email_alert_task | ||||
|             text_task = handle_check_sms_alert_task | ||||
|  | ||||
|             email_alert = instance.email_alert | ||||
|             text_alert = instance.text_alert | ||||
|             dashboard_alert = instance.dashboard_alert | ||||
|             alert_template = instance.agent.get_alert_template() | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             alert_severity = instance.alert_severity | ||||
|             agent = instance.agent | ||||
|  | ||||
|             # set alert_template settings | ||||
|             if alert_template: | ||||
|                 dashboard_severities = alert_template.check_dashboard_alert_severity | ||||
|                 email_severities = alert_template.check_email_alert_severity | ||||
|                 text_severities = alert_template.check_text_alert_severity | ||||
|                 always_dashboard = alert_template.check_always_alert | ||||
|                 always_email = alert_template.check_always_email | ||||
|                 always_text = alert_template.check_always_text | ||||
|                 alert_interval = alert_template.check_periodic_alert_days | ||||
|  | ||||
|             if instance.should_create_alert(alert_template): | ||||
|                 alert = cls.create_or_return_check_alert(instance) | ||||
|             else: | ||||
|                 # check if there is an alert that exists | ||||
|                 if cls.objects.filter(assigned_check=instance, resolved=False).exists(): | ||||
|                     alert = cls.objects.get(assigned_check=instance, resolved=False) | ||||
|                 else: | ||||
|                     alert = None | ||||
|  | ||||
|         elif isinstance(instance, AutomatedTask): | ||||
|             from autotasks.tasks import handle_task_email_alert, handle_task_sms_alert | ||||
|  | ||||
|             email_task = handle_task_email_alert | ||||
|             text_task = handle_task_sms_alert | ||||
|  | ||||
|             email_alert = instance.email_alert | ||||
|             text_alert = instance.text_alert | ||||
|             dashboard_alert = instance.dashboard_alert | ||||
|             alert_template = instance.agent.get_alert_template() | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             alert_severity = instance.alert_severity | ||||
|             agent = instance.agent | ||||
|  | ||||
|             # set alert_template settings | ||||
|             if alert_template: | ||||
|                 dashboard_severities = alert_template.task_dashboard_alert_severity | ||||
|                 email_severities = alert_template.task_email_alert_severity | ||||
|                 text_severities = alert_template.task_text_alert_severity | ||||
|                 always_dashboard = alert_template.task_always_alert | ||||
|                 always_email = alert_template.task_always_email | ||||
|                 always_text = alert_template.task_always_text | ||||
|                 alert_interval = alert_template.task_periodic_alert_days | ||||
|  | ||||
|             if instance.should_create_alert(alert_template): | ||||
|                 alert = cls.create_or_return_task_alert(instance) | ||||
|             else: | ||||
|                 # check if there is an alert that exists | ||||
|                 if cls.objects.filter(assigned_task=instance, resolved=False).exists(): | ||||
|                     alert = cls.objects.get(assigned_task=instance, resolved=False) | ||||
|                 else: | ||||
|                     alert = None | ||||
|         else: | ||||
|             return | ||||
|  | ||||
|         # return if agent is in maintenance mode | ||||
|         if maintenance_mode or not alert: | ||||
|             return | ||||
|  | ||||
|         # check if alert severity changed on check and update the alert | ||||
|         if alert_severity != alert.severity: | ||||
|             alert.severity = alert_severity | ||||
|             alert.save(update_fields=["severity"]) | ||||
|  | ||||
|         # create alert in dashboard if enabled | ||||
|         if dashboard_alert or always_dashboard: | ||||
|  | ||||
|             # check if alert template is set and specific severities are configured | ||||
|             if alert_template and alert.severity not in dashboard_severities:  # type: ignore | ||||
|                 pass | ||||
|             else: | ||||
|                 alert.hidden = False | ||||
|                 alert.save() | ||||
|  | ||||
|         # send email if enabled | ||||
|         if email_alert or always_email: | ||||
|  | ||||
|             # check if alert template is set and specific severities are configured | ||||
|             if alert_template and alert.severity not in email_severities:  # type: ignore | ||||
|                 pass | ||||
|             else: | ||||
|                 email_task.delay( | ||||
|                     pk=alert.pk, | ||||
|                     alert_interval=alert_interval, | ||||
|                 ) | ||||
|  | ||||
|         # send text if enabled | ||||
|         if text_alert or always_text: | ||||
|  | ||||
|             # check if alert template is set and specific severities are configured | ||||
|             if alert_template and alert.severity not in text_severities:  # type: ignore | ||||
|                 pass | ||||
|             else: | ||||
|                 text_task.delay(pk=alert.pk, alert_interval=alert_interval) | ||||
|  | ||||
|         # check if any scripts should be run | ||||
|         if alert_template and alert_template.action and not alert.action_run: | ||||
|             r = agent.run_script( | ||||
|                 scriptpk=alert_template.action.pk, | ||||
|                 args=alert_template.action_args, | ||||
|                 timeout=alert_template.action_timeout, | ||||
|                 wait=True, | ||||
|                 full=True, | ||||
|                 run_on_any=True, | ||||
|             ) | ||||
|  | ||||
|             # command was successful | ||||
|             if type(r) == dict: | ||||
|                 alert.action_retcode = r["retcode"] | ||||
|                 alert.action_stdout = r["stdout"] | ||||
|                 alert.action_stderr = r["stderr"] | ||||
|                 alert.action_execution_time = "{:.4f}".format(r["execution_time"]) | ||||
|                 alert.action_run = djangotime.now() | ||||
|                 alert.save() | ||||
|             else: | ||||
|                 logger.error( | ||||
|                     f"Failure action: {alert_template.action.name} failed to run on any agent for {agent.hostname} failure alert" | ||||
|                 ) | ||||
|  | ||||
|     @classmethod | ||||
|     def handle_alert_resolve(cls, instance: Union[Agent, AutomatedTask, Check]) -> None: | ||||
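|         # resolve the open alert and send any configured "resolved" notifications or run the resolved action script | ||||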
|         from agents.models import Agent | ||||
|         from autotasks.models import AutomatedTask | ||||
|         from checks.models import Check | ||||
|  | ||||
|         # set variables | ||||
|         email_on_resolved = False | ||||
|         text_on_resolved = False | ||||
|         resolved_email_task = None | ||||
|         resolved_text_task = None | ||||
|  | ||||
|         # check what type of instance was passed | ||||
|         if isinstance(instance, Agent): | ||||
|             from agents.tasks import agent_recovery_email_task, agent_recovery_sms_task | ||||
|  | ||||
|             resolved_email_task = agent_recovery_email_task | ||||
|             resolved_text_task = agent_recovery_sms_task | ||||
|  | ||||
|             alert_template = instance.get_alert_template() | ||||
|             alert = cls.objects.get(agent=instance, resolved=False) | ||||
|             maintenance_mode = instance.maintenance_mode | ||||
|             agent = instance | ||||
|  | ||||
|             if alert_template: | ||||
|                 email_on_resolved = alert_template.agent_email_on_resolved | ||||
|                 text_on_resolved = alert_template.agent_text_on_resolved | ||||
|  | ||||
|         elif isinstance(instance, Check): | ||||
|             from checks.tasks import ( | ||||
|                 handle_resolved_check_email_alert_task, | ||||
|                 handle_resolved_check_sms_alert_task, | ||||
|             ) | ||||
|  | ||||
|             resolved_email_task = handle_resolved_check_email_alert_task | ||||
|             resolved_text_task = handle_resolved_check_sms_alert_task | ||||
|  | ||||
|             alert_template = instance.agent.get_alert_template() | ||||
|             alert = cls.objects.get(assigned_check=instance, resolved=False) | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             agent = instance.agent | ||||
|  | ||||
|             if alert_template: | ||||
|                 email_on_resolved = alert_template.check_email_on_resolved | ||||
|                 text_on_resolved = alert_template.check_text_on_resolved | ||||
|  | ||||
|         elif isinstance(instance, AutomatedTask): | ||||
|             from autotasks.tasks import ( | ||||
|                 handle_resolved_task_email_alert, | ||||
|                 handle_resolved_task_sms_alert, | ||||
|             ) | ||||
|  | ||||
|             resolved_email_task = handle_resolved_task_email_alert | ||||
|             resolved_text_task = handle_resolved_task_sms_alert | ||||
|  | ||||
|             alert_template = instance.agent.get_alert_template() | ||||
|             alert = cls.objects.get(assigned_task=instance, resolved=False) | ||||
|             maintenance_mode = instance.agent.maintenance_mode | ||||
|             agent = instance.agent | ||||
|  | ||||
|             if alert_template: | ||||
|                 email_on_resolved = alert_template.task_email_on_resolved | ||||
|                 text_on_resolved = alert_template.task_text_on_resolved | ||||
|  | ||||
|         else: | ||||
|             return | ||||
|  | ||||
|         # return if agent is in maintenance mode | ||||
|         if maintenance_mode: | ||||
|             return | ||||
|  | ||||
|         alert.resolve() | ||||
|  | ||||
|         # check if a resolved email notification should be sent | ||||
|         if email_on_resolved and not alert.resolved_email_sent: | ||||
|             resolved_email_task.delay(pk=alert.pk) | ||||
|  | ||||
|         # check if resolved text should be sent | ||||
|         if text_on_resolved and not alert.resolved_sms_sent: | ||||
|             resolved_text_task.delay(pk=alert.pk) | ||||
|  | ||||
|         # check if resolved script should be run | ||||
|         if ( | ||||
|             alert_template | ||||
|             and alert_template.resolved_action | ||||
|             and not alert.resolved_action_run | ||||
|         ): | ||||
|             r = agent.run_script( | ||||
|                 scriptpk=alert_template.resolved_action.pk, | ||||
|                 args=alert_template.resolved_action_args, | ||||
|                 timeout=alert_template.resolved_action_timeout, | ||||
|                 wait=True, | ||||
|                 full=True, | ||||
|                 run_on_any=True, | ||||
|             ) | ||||
|  | ||||
|             # command was successful | ||||
|             if type(r) == dict: | ||||
|                 alert.resolved_action_retcode = r["retcode"] | ||||
|                 alert.resolved_action_stdout = r["stdout"] | ||||
|                 alert.resolved_action_stderr = r["stderr"] | ||||
|                 alert.resolved_action_execution_time = "{:.4f}".format( | ||||
|                     r["execution_time"] | ||||
|                 ) | ||||
|                 alert.resolved_action_run = djangotime.now() | ||||
|                 alert.save() | ||||
|             else: | ||||
|                 logger.error( | ||||
|                     f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert" | ||||
|                 ) | ||||
|  | ||||
|  | ||||
| class AlertTemplate(models.Model): | ||||
|     name = models.CharField(max_length=100) | ||||
|     is_active = models.BooleanField(default=True) | ||||
|  | ||||
|     action = models.ForeignKey( | ||||
|         "scripts.Script", | ||||
|         related_name="alert_template", | ||||
|         blank=True, | ||||
|         null=True, | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|     action_args = ArrayField( | ||||
|         models.CharField(max_length=255, null=True, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     action_timeout = models.PositiveIntegerField(default=15) | ||||
|     resolved_action = models.ForeignKey( | ||||
|         "scripts.Script", | ||||
|         related_name="resolved_alert_template", | ||||
|         blank=True, | ||||
|         null=True, | ||||
|         on_delete=models.SET_NULL, | ||||
|     ) | ||||
|     resolved_action_args = ArrayField( | ||||
|         models.CharField(max_length=255, null=True, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     resolved_action_timeout = models.PositiveIntegerField(default=15) | ||||
|  | ||||
|     # overrides the global recipients | ||||
|     email_recipients = ArrayField( | ||||
|         models.CharField(max_length=100, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     text_recipients = ArrayField( | ||||
|         models.CharField(max_length=100, blank=True), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|  | ||||
|     # overrides the from address | ||||
|     email_from = models.EmailField(blank=True, null=True) | ||||
|  | ||||
|     # agent alert settings | ||||
|     agent_email_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_text_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     agent_always_email = BooleanField(null=True, blank=True, default=None) | ||||
|     agent_always_text = BooleanField(null=True, blank=True, default=None) | ||||
|     agent_always_alert = BooleanField(null=True, blank=True, default=None) | ||||
|     agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0) | ||||
|  | ||||
|     # check alert settings | ||||
|     check_email_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_text_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_dashboard_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     check_email_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     check_text_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     check_always_email = BooleanField(null=True, blank=True, default=None) | ||||
|     check_always_text = BooleanField(null=True, blank=True, default=None) | ||||
|     check_always_alert = BooleanField(null=True, blank=True, default=None) | ||||
|     check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0) | ||||
|  | ||||
|     # task alert settings | ||||
|     task_email_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_text_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_dashboard_alert_severity = ArrayField( | ||||
|         models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES), | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     task_email_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     task_text_on_resolved = BooleanField(null=True, blank=True, default=False) | ||||
|     task_always_email = BooleanField(null=True, blank=True, default=None) | ||||
|     task_always_text = BooleanField(null=True, blank=True, default=None) | ||||
|     task_always_alert = BooleanField(null=True, blank=True, default=None) | ||||
|     task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0) | ||||
|  | ||||
|     # exclusion settings | ||||
|     exclude_workstations = BooleanField(null=True, blank=True, default=False) | ||||
|     exclude_servers = BooleanField(null=True, blank=True, default=False) | ||||
|  | ||||
|     excluded_sites = models.ManyToManyField( | ||||
|         "clients.Site", related_name="alert_exclusions", blank=True | ||||
|     ) | ||||
|     excluded_clients = models.ManyToManyField( | ||||
|         "clients.Client", related_name="alert_exclusions", blank=True | ||||
|     ) | ||||
|     excluded_agents = models.ManyToManyField( | ||||
|         "agents.Agent", related_name="alert_exclusions", blank=True | ||||
|     ) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.name | ||||
|  | ||||
|     @property | ||||
|     def has_agent_settings(self) -> bool: | ||||
|         return ( | ||||
|             self.agent_email_on_resolved | ||||
|             or self.agent_text_on_resolved | ||||
|             or self.agent_always_email | ||||
|             or self.agent_always_text | ||||
|             or self.agent_always_alert | ||||
|             or bool(self.agent_periodic_alert_days) | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def has_check_settings(self) -> bool: | ||||
|         return ( | ||||
|             bool(self.check_email_alert_severity) | ||||
|             or bool(self.check_text_alert_severity) | ||||
|             or bool(self.check_dashboard_alert_severity) | ||||
|             or self.check_email_on_resolved | ||||
|             or self.check_text_on_resolved | ||||
|             or self.check_always_email | ||||
|             or self.check_always_text | ||||
|             or self.check_always_alert | ||||
|             or bool(self.check_periodic_alert_days) | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def has_task_settings(self) -> bool: | ||||
|         return ( | ||||
|             bool(self.task_email_alert_severity) | ||||
|             or bool(self.task_text_alert_severity) | ||||
|             or bool(self.task_dashboard_alert_severity) | ||||
|             or self.task_email_on_resolved | ||||
|             or self.task_text_on_resolved | ||||
|             or self.task_always_email | ||||
|             or self.task_always_text | ||||
|             or self.task_always_alert | ||||
|             or bool(self.task_periodic_alert_days) | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def has_core_settings(self) -> bool: | ||||
|         return bool(self.email_from) or self.email_recipients or self.text_recipients | ||||
|  | ||||
|     @property | ||||
|     def is_default_template(self) -> bool: | ||||
|         return self.default_alert_template.exists()  # type: ignore | ||||
|   | ||||
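A quick usage sketch for the `has_check_settings` property above (hypothetical object, illustrative values, and assuming the template's remaining fields keep their defaults): the property only becomes truthy once at least one check-level setting is populated.

    from alerts.models import AlertTemplate

    template = AlertTemplate.objects.create(name="Default template")
    assert not template.has_check_settings  # severity lists empty, booleans unset, days = 0

    template.check_email_alert_severity = ["error", "warning"]
    template.check_periodic_alert_days = 7
    template.save()
    assert template.has_check_settings  # any populated check setting flips it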
| @@ -1,19 +1,121 @@ | ||||
| from rest_framework.serializers import ( | ||||
|     ModelSerializer, | ||||
|     ReadOnlyField, | ||||
|     DateTimeField, | ||||
| ) | ||||
| from rest_framework.fields import SerializerMethodField | ||||
| from rest_framework.serializers import ModelSerializer, ReadOnlyField | ||||
|  | ||||
| from .models import Alert | ||||
| from automation.serializers import PolicySerializer | ||||
| from clients.serializers import ClientSerializer, SiteSerializer | ||||
| from tacticalrmm.utils import get_default_timezone | ||||
|  | ||||
| from .models import Alert, AlertTemplate | ||||
|  | ||||
|  | ||||
| class AlertSerializer(ModelSerializer): | ||||
|  | ||||
|     hostname = ReadOnlyField(source="agent.hostname") | ||||
|     client = ReadOnlyField(source="agent.client") | ||||
|     site = ReadOnlyField(source="agent.site") | ||||
|     alert_time = DateTimeField(format="iso-8601") | ||||
|     hostname = SerializerMethodField(read_only=True) | ||||
|     client = SerializerMethodField(read_only=True) | ||||
|     site = SerializerMethodField(read_only=True) | ||||
|     alert_time = SerializerMethodField(read_only=True) | ||||
|     resolve_on = SerializerMethodField(read_only=True) | ||||
|     snoozed_until = SerializerMethodField(read_only=True) | ||||
|  | ||||
|     def get_hostname(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.hostname if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.hostname | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.hostname if instance.assigned_task else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_client(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.client.name if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.client.name | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.client.name | ||||
|                 if instance.assigned_task | ||||
|                 else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_site(self, instance): | ||||
|         if instance.alert_type == "availability": | ||||
|             return instance.agent.site.name if instance.agent else "" | ||||
|         elif instance.alert_type == "check": | ||||
|             return ( | ||||
|                 instance.assigned_check.agent.site.name | ||||
|                 if instance.assigned_check | ||||
|                 else "" | ||||
|             ) | ||||
|         elif instance.alert_type == "task": | ||||
|             return ( | ||||
|                 instance.assigned_task.agent.site.name if instance.assigned_task else "" | ||||
|             ) | ||||
|         else: | ||||
|             return "" | ||||
|  | ||||
|     def get_alert_time(self, instance): | ||||
|         if instance.alert_time: | ||||
|             return instance.alert_time.astimezone(get_default_timezone()).timestamp() | ||||
|         else: | ||||
|             return None | ||||
|  | ||||
|     def get_resolve_on(self, instance): | ||||
|         if instance.resolved_on: | ||||
|             return instance.resolved_on.astimezone(get_default_timezone()).timestamp() | ||||
|         else: | ||||
|             return None | ||||
|  | ||||
|     def get_snoozed_until(self, instance): | ||||
|         if instance.snooze_until: | ||||
|             return instance.snooze_until.astimezone(get_default_timezone()).timestamp() | ||||
|         return None | ||||
|  | ||||
|     class Meta: | ||||
|         model = Alert | ||||
|         fields = "__all__" | ||||
|  | ||||
|  | ||||
| class AlertTemplateSerializer(ModelSerializer): | ||||
|     agent_settings = ReadOnlyField(source="has_agent_settings") | ||||
|     check_settings = ReadOnlyField(source="has_check_settings") | ||||
|     task_settings = ReadOnlyField(source="has_task_settings") | ||||
|     core_settings = ReadOnlyField(source="has_core_settings") | ||||
|     default_template = ReadOnlyField(source="is_default_template") | ||||
|     action_name = ReadOnlyField(source="action.name") | ||||
|     resolved_action_name = ReadOnlyField(source="resolved_action.name") | ||||
|     applied_count = SerializerMethodField() | ||||
|  | ||||
|     class Meta: | ||||
|         model = AlertTemplate | ||||
|         fields = "__all__" | ||||
|  | ||||
|     def get_applied_count(self, instance): | ||||
|         count = 0 | ||||
|         count += instance.policies.count() | ||||
|         count += instance.clients.count() | ||||
|         count += instance.sites.count() | ||||
|         return count | ||||
|  | ||||
|  | ||||
| class AlertTemplateRelationSerializer(ModelSerializer): | ||||
|     policies = PolicySerializer(read_only=True, many=True) | ||||
|     clients = ClientSerializer(read_only=True, many=True) | ||||
|     sites = SiteSerializer(read_only=True, many=True) | ||||
|  | ||||
|     class Meta: | ||||
|         model = AlertTemplate | ||||
|         fields = "__all__" | ||||
|   | ||||
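A minimal sketch of what the reworked AlertSerializer now emits, assuming an existing check-type alert (the printed values are illustrative): hostname, client and site are resolved through the assigned check's agent, and the time fields come back as POSIX timestamps in the server's default timezone, or None.

    from alerts.models import Alert
    from alerts.serializers import AlertSerializer

    alert = Alert.objects.filter(alert_type="check").first()
    if alert is not None:
        data = AlertSerializer(alert).data
        # hostname/client/site come from assigned_check.agent for "check" alerts;
        # alert_time / resolve_on / snoozed_until are epoch floats or None
        print(data["hostname"], data["client"], data["site"], data["alert_time"])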
							
								
								
									
api/tacticalrmm/alerts/tasks.py  (new file, 14 lines)
							| @@ -0,0 +1,14 @@ | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from alerts.models import Alert | ||||
| from tacticalrmm.celery import app | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def unsnooze_alerts() -> str: | ||||
|  | ||||
|     Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update( | ||||
|         snoozed=False, snooze_until=None | ||||
|     ) | ||||
|  | ||||
|     return "ok" | ||||
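The diff only adds the task itself; its schedule is not shown here. A hypothetical Celery beat entry (settings-style, names assumed) that would run unsnooze_alerts periodically could look like this:

    from celery.schedules import crontab

    CELERY_BEAT_SCHEDULE = {
        "unsnooze-alerts": {
            "task": "alerts.tasks.unsnooze_alerts",
            # hourly: clear the snoozed flag on alerts whose snooze_until has passed
            "schedule": crontab(minute=0),
        },
    }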
										
											
File diff suppressed because it is too large
							| @@ -1,7 +1,12 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
|     path("alerts/", views.GetAddAlerts.as_view()), | ||||
|     path("bulk/", views.BulkAlerts.as_view()), | ||||
|     path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()), | ||||
|     path("alerttemplates/", views.GetAddAlertTemplates.as_view()), | ||||
|     path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()), | ||||
|     path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()), | ||||
| ] | ||||
|   | ||||
| @@ -1,19 +1,103 @@ | ||||
| from datetime import datetime as dt | ||||
|  | ||||
| from django.db.models import Q | ||||
| from django.shortcuts import get_object_or_404 | ||||
|  | ||||
| from rest_framework.views import APIView | ||||
| from django.utils import timezone as djangotime | ||||
| from rest_framework.response import Response | ||||
| from rest_framework import status | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from .models import Alert | ||||
| from tacticalrmm.utils import notify_error | ||||
|  | ||||
| from .serializers import AlertSerializer | ||||
| from .models import Alert, AlertTemplate | ||||
| from .serializers import ( | ||||
|     AlertSerializer, | ||||
|     AlertTemplateRelationSerializer, | ||||
|     AlertTemplateSerializer, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class GetAddAlerts(APIView): | ||||
|     def get(self, request): | ||||
|         alerts = Alert.objects.all() | ||||
|     def patch(self, request): | ||||
|  | ||||
|         return Response(AlertSerializer(alerts, many=True).data) | ||||
|         # top 10 alerts for dashboard icon | ||||
|         if "top" in request.data.keys(): | ||||
|             alerts = Alert.objects.filter( | ||||
|                 resolved=False, snoozed=False, hidden=False | ||||
|             ).order_by("alert_time")[: int(request.data["top"])] | ||||
|             count = Alert.objects.filter( | ||||
|                 resolved=False, snoozed=False, hidden=False | ||||
|             ).count() | ||||
|             return Response( | ||||
|                 { | ||||
|                     "alerts_count": count, | ||||
|                     "alerts": AlertSerializer(alerts, many=True).data, | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|         elif any( | ||||
|             key | ||||
|             in [ | ||||
|                 "timeFilter", | ||||
|                 "clientFilter", | ||||
|                 "severityFilter", | ||||
|                 "resolvedFilter", | ||||
|                 "snoozedFilter", | ||||
|             ] | ||||
|             for key in request.data.keys() | ||||
|         ): | ||||
|             clientFilter = Q() | ||||
|             severityFilter = Q() | ||||
|             timeFilter = Q() | ||||
|             resolvedFilter = Q() | ||||
|             snoozedFilter = Q() | ||||
|  | ||||
|             if ( | ||||
|                 "snoozedFilter" in request.data.keys() | ||||
|                 and not request.data["snoozedFilter"] | ||||
|             ): | ||||
|                 snoozedFilter = Q(snoozed=request.data["snoozedFilter"]) | ||||
|  | ||||
|             if ( | ||||
|                 "resolvedFilter" in request.data.keys() | ||||
|                 and not request.data["resolvedFilter"] | ||||
|             ): | ||||
|                 resolvedFilter = Q(resolved=request.data["resolvedFilter"]) | ||||
|  | ||||
|             if "clientFilter" in request.data.keys(): | ||||
|                 from agents.models import Agent | ||||
|                 from clients.models import Client | ||||
|  | ||||
|                 clients = Client.objects.filter( | ||||
|                     pk__in=request.data["clientFilter"] | ||||
|                 ).values_list("id") | ||||
|                 agents = Agent.objects.filter(site__client_id__in=clients).values_list( | ||||
|                     "id" | ||||
|                 ) | ||||
|  | ||||
|                 clientFilter = Q(agent__in=agents) | ||||
|  | ||||
|             if "severityFilter" in request.data.keys(): | ||||
|                 severityFilter = Q(severity__in=request.data["severityFilter"]) | ||||
|  | ||||
|             if "timeFilter" in request.data.keys(): | ||||
|                 timeFilter = Q( | ||||
|                     alert_time__lte=djangotime.make_aware(dt.today()), | ||||
|                     alert_time__gt=djangotime.make_aware(dt.today()) | ||||
|                     - djangotime.timedelta(days=int(request.data["timeFilter"])), | ||||
|                 ) | ||||
|  | ||||
|             alerts = ( | ||||
|                 Alert.objects.filter(clientFilter) | ||||
|                 .filter(severityFilter) | ||||
|                 .filter(resolvedFilter) | ||||
|                 .filter(snoozedFilter) | ||||
|                 .filter(timeFilter) | ||||
|             ) | ||||
|             return Response(AlertSerializer(alerts, many=True).data) | ||||
|  | ||||
|         else: | ||||
|             alerts = Alert.objects.all() | ||||
|             return Response(AlertSerializer(alerts, many=True).data) | ||||
|  | ||||
|     def post(self, request): | ||||
|         serializer = AlertSerializer(data=request.data, partial=True) | ||||
| @@ -32,7 +116,40 @@ class GetUpdateDeleteAlert(APIView): | ||||
|     def put(self, request, pk): | ||||
|         alert = get_object_or_404(Alert, pk=pk) | ||||
|  | ||||
|         serializer = AlertSerializer(instance=alert, data=request.data, partial=True) | ||||
|         data = request.data | ||||
|  | ||||
|         if "type" in data.keys(): | ||||
|             if data["type"] == "resolve": | ||||
|                 data = { | ||||
|                     "resolved": True, | ||||
|                     "resolved_on": djangotime.now(), | ||||
|                     "snoozed": False, | ||||
|                 } | ||||
|  | ||||
|                 # unable to set snooze_until to None in the serializer | ||||
|                 alert.snooze_until = None | ||||
|                 alert.save() | ||||
|             elif data["type"] == "snooze": | ||||
|                 if "snooze_days" in data.keys(): | ||||
|                     data = { | ||||
|                         "snoozed": True, | ||||
|                         "snooze_until": djangotime.now() | ||||
|                         + djangotime.timedelta(days=int(data["snooze_days"])), | ||||
|                     } | ||||
|                 else: | ||||
|                     return notify_error( | ||||
|                         "Missing 'snooze_days' when trying to snooze alert" | ||||
|                     ) | ||||
|             elif data["type"] == "unsnooze": | ||||
|                 data = {"snoozed": False} | ||||
|  | ||||
|                 # unable to set snooze_until to None in the serializer | ||||
|                 alert.snooze_until = None | ||||
|                 alert.save() | ||||
|             else: | ||||
|                 return notify_error("There was an error in the request data") | ||||
|  | ||||
|         serializer = AlertSerializer(instance=alert, data=data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
| @@ -42,3 +159,68 @@ class GetUpdateDeleteAlert(APIView): | ||||
|         Alert.objects.get(pk=pk).delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class BulkAlerts(APIView): | ||||
|     def post(self, request): | ||||
|         if request.data["bulk_action"] == "resolve": | ||||
|             Alert.objects.filter(id__in=request.data["alerts"]).update( | ||||
|                 resolved=True, | ||||
|                 resolved_on=djangotime.now(), | ||||
|                 snoozed=False, | ||||
|                 snooze_until=None, | ||||
|             ) | ||||
|             return Response("ok") | ||||
|         elif request.data["bulk_action"] == "snooze": | ||||
|             if "snooze_days" in request.data.keys(): | ||||
|                 Alert.objects.filter(id__in=request.data["alerts"]).update( | ||||
|                     snoozed=True, | ||||
|                     snooze_until=djangotime.now() | ||||
|                     + djangotime.timedelta(days=int(request.data["snooze_days"])), | ||||
|                 ) | ||||
|                 return Response("ok") | ||||
|  | ||||
|         return notify_error("The request was invalid") | ||||
|  | ||||
|  | ||||
| class GetAddAlertTemplates(APIView): | ||||
|     def get(self, request): | ||||
|         alert_templates = AlertTemplate.objects.all() | ||||
|  | ||||
|         return Response(AlertTemplateSerializer(alert_templates, many=True).data) | ||||
|  | ||||
|     def post(self, request): | ||||
|         serializer = AlertTemplateSerializer(data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class GetUpdateDeleteAlertTemplate(APIView): | ||||
|     def get(self, request, pk): | ||||
|         alert_template = get_object_or_404(AlertTemplate, pk=pk) | ||||
|  | ||||
|         return Response(AlertTemplateSerializer(alert_template).data) | ||||
|  | ||||
|     def put(self, request, pk): | ||||
|         alert_template = get_object_or_404(AlertTemplate, pk=pk) | ||||
|  | ||||
|         serializer = AlertTemplateSerializer( | ||||
|             instance=alert_template, data=request.data, partial=True | ||||
|         ) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def delete(self, request, pk): | ||||
|         get_object_or_404(AlertTemplate, pk=pk).delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class RelatedAlertTemplate(APIView): | ||||
|     def get(self, request, pk): | ||||
|         alert_template = get_object_or_404(AlertTemplate, pk=pk) | ||||
|         return Response(AlertTemplateRelationSerializer(alert_template).data) | ||||
|   | ||||
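Illustrative request bodies for the endpoints added above; the paths and keys are taken from this diff, while the concrete values are made up.

    # PATCH alerts/  -> first N unresolved, unsnoozed, unhidden alerts (by alert_time) for the dashboard icon
    {"top": 10}

    # PATCH alerts/  -> filtered listing; any of these keys selects the filter branch
    {
        "timeFilter": 30,             # alerts from the last 30 days
        "clientFilter": [1, 2],       # Client primary keys, matched via their agents
        "severityFilter": ["error"],
        "resolvedFilter": False,      # False -> only unresolved alerts are returned
        "snoozedFilter": False,       # False -> only unsnoozed alerts are returned
    }

    # PUT alerts/<pk>/  -> resolve, snooze (requires snooze_days) or unsnooze a single alert
    {"type": "snooze", "snooze_days": 3}

    # POST bulk/  -> resolve or snooze several alerts at once
    {"bulk_action": "snooze", "alerts": [11, 12, 13], "snooze_days": 1}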
| @@ -1,11 +1,12 @@ | ||||
| import os | ||||
| import json | ||||
| import os | ||||
| from itertools import cycle | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from django.conf import settings | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from unittest.mock import patch | ||||
| from model_bakery import baker | ||||
| from itertools import cycle | ||||
|  | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
|  | ||||
| class TestAPIv3(TacticalTestCase): | ||||
| @@ -52,3 +53,39 @@ class TestAPIv3(TacticalTestCase): | ||||
|             r.json(), | ||||
|             {"agent": self.agent.pk, "check_interval": self.agent.check_interval}, | ||||
|         ) | ||||
|  | ||||
|     def test_checkin_patch(self): | ||||
|         from logs.models import PendingAction | ||||
|  | ||||
|         url = "/api/v3/checkin/" | ||||
|         agent_updated = baker.make_recipe("agents.agent", version="1.3.0") | ||||
|         PendingAction.objects.create( | ||||
|             agent=agent_updated, | ||||
|             action_type="agentupdate", | ||||
|             details={ | ||||
|                 "url": agent_updated.winagent_dl, | ||||
|                 "version": agent_updated.version, | ||||
|                 "inno": agent_updated.win_inno_exe, | ||||
|             }, | ||||
|         ) | ||||
|         action = agent_updated.pendingactions.filter(action_type="agentupdate").first() | ||||
|         self.assertEqual(action.status, "pending") | ||||
|  | ||||
|         # test agent failed to update and still on same version | ||||
|         payload = { | ||||
|             "func": "hello", | ||||
|             "agent_id": agent_updated.agent_id, | ||||
|             "version": "1.3.0", | ||||
|         } | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         action = agent_updated.pendingactions.filter(action_type="agentupdate").first() | ||||
|         self.assertEqual(action.status, "pending") | ||||
|  | ||||
|         # test agent successful update | ||||
|         payload["version"] = settings.LATEST_AGENT_VER | ||||
|         r = self.client.patch(url, payload, format="json") | ||||
|         self.assertEqual(r.status_code, 200) | ||||
|         action = agent_updated.pendingactions.filter(action_type="agentupdate").first() | ||||
|         self.assertEqual(action.status, "completed") | ||||
|         action.delete() | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
| @@ -11,4 +12,10 @@ urlpatterns = [ | ||||
|     path("newagent/", views.NewAgent.as_view()), | ||||
|     path("software/", views.Software.as_view()), | ||||
|     path("installer/", views.Installer.as_view()), | ||||
|     path("checkin/", views.CheckIn.as_view()), | ||||
|     path("syncmesh/", views.SyncMeshNodeID.as_view()), | ||||
|     path("choco/", views.Choco.as_view()), | ||||
|     path("winupdates/", views.WinUpdates.as_view()), | ||||
|     path("superseded/", views.SupersededWinUpdate.as_view()), | ||||
|     path("<int:pk>/chocoresult/", views.ChocoResult.as_view()), | ||||
| ] | ||||
|   | ||||
| @@ -1,49 +1,276 @@ | ||||
| import asyncio | ||||
| import os | ||||
| import requests | ||||
| from loguru import logger | ||||
| from packaging import version as pyver | ||||
| import time | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.http import HttpResponse | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.utils import timezone as djangotime | ||||
| from django.http import HttpResponse | ||||
|  | ||||
| from loguru import logger | ||||
| from packaging import version as pyver | ||||
| from rest_framework.authentication import TokenAuthentication | ||||
| from rest_framework.authtoken.models import Token | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.authentication import TokenAuthentication | ||||
| from rest_framework.permissions import IsAuthenticated | ||||
| from rest_framework.authtoken.models import Token | ||||
|  | ||||
| from agents.models import Agent | ||||
| from checks.models import Check | ||||
| from autotasks.models import AutomatedTask | ||||
| from accounts.models import User | ||||
| from winupdate.models import WinUpdatePolicy | ||||
| from software.models import InstalledSoftware | ||||
| from checks.serializers import CheckRunnerGetSerializer | ||||
| from agents.models import Agent | ||||
| from agents.serializers import WinAgentSerializer | ||||
| from autotasks.models import AutomatedTask | ||||
| from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer | ||||
| from winupdate.serializers import ApprovedUpdateSerializer | ||||
|  | ||||
| from agents.tasks import ( | ||||
|     agent_recovery_email_task, | ||||
|     agent_recovery_sms_task, | ||||
|     handle_agent_recovery_task, | ||||
| ) | ||||
| from checks.models import Check | ||||
| from checks.serializers import CheckRunnerGetSerializer | ||||
| from checks.utils import bytes2human | ||||
| from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList | ||||
| from logs.models import PendingAction | ||||
| from software.models import InstalledSoftware | ||||
| from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats | ||||
| from winupdate.models import WinUpdate, WinUpdatePolicy | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
|  | ||||
| class CheckRunner(APIView): | ||||
|     """ | ||||
|     For the windows golang agent | ||||
|     """ | ||||
| class CheckIn(APIView): | ||||
|  | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def patch(self, request): | ||||
|         from alerts.models import Alert | ||||
|  | ||||
|         updated = False | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         if pyver.parse(request.data["version"]) > pyver.parse( | ||||
|             agent.version | ||||
|         ) or pyver.parse(request.data["version"]) == pyver.parse( | ||||
|             settings.LATEST_AGENT_VER | ||||
|         ): | ||||
|             updated = True | ||||
|         agent.version = request.data["version"] | ||||
|         agent.last_seen = djangotime.now() | ||||
|         agent.save(update_fields=["version", "last_seen"]) | ||||
|  | ||||
|         # change agent update pending status to completed if agent has just updated | ||||
|         if ( | ||||
|             updated | ||||
|             and agent.pendingactions.filter(  # type: ignore | ||||
|                 action_type="agentupdate", status="pending" | ||||
|             ).exists() | ||||
|         ): | ||||
|             agent.pendingactions.filter(  # type: ignore | ||||
|                 action_type="agentupdate", status="pending" | ||||
|             ).update(status="completed") | ||||
|  | ||||
|         # handles any alerting actions | ||||
|         if Alert.objects.filter(agent=agent, resolved=False).exists(): | ||||
|             Alert.handle_alert_resolve(agent) | ||||
|  | ||||
|         recovery = agent.recoveryactions.filter(last_run=None).last()  # type: ignore | ||||
|         if recovery is not None: | ||||
|             recovery.last_run = djangotime.now() | ||||
|             recovery.save(update_fields=["last_run"]) | ||||
|             handle_agent_recovery_task.delay(pk=recovery.pk)  # type: ignore | ||||
|             return Response("ok") | ||||
|  | ||||
|         # get any pending actions | ||||
|         if agent.pendingactions.filter(status="pending").exists():  # type: ignore | ||||
|             agent.handle_pending_actions() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def put(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True) | ||||
|  | ||||
|         if request.data["func"] == "disks": | ||||
|             disks = request.data["disks"] | ||||
|             new = [] | ||||
|             for disk in disks: | ||||
|                 tmp = { | ||||
|                     "device": disk["device"], | ||||
|                     "fstype": disk["fstype"], | ||||
|                     "total": bytes2human(disk["total"]), | ||||
|                     "used": bytes2human(disk["used"]), | ||||
|                     "free": bytes2human(disk["free"]), | ||||
|                     "percent": int(disk["percent"]), | ||||
|                 } | ||||
|                 new.append(tmp) | ||||
|  | ||||
|             serializer.is_valid(raise_exception=True) | ||||
|             serializer.save(disks=new) | ||||
|             return Response("ok") | ||||
|  | ||||
|         if request.data["func"] == "loggedonuser": | ||||
|             if request.data["logged_in_username"] != "None": | ||||
|                 serializer.is_valid(raise_exception=True) | ||||
|                 serializer.save(last_logged_in_user=request.data["logged_in_username"]) | ||||
|                 return Response("ok") | ||||
|  | ||||
|         if request.data["func"] == "software": | ||||
|             raw: SoftwareList = request.data["software"] | ||||
|             if not isinstance(raw, list): | ||||
|                 return notify_error("err") | ||||
|  | ||||
|             sw = filter_software(raw) | ||||
|             if not InstalledSoftware.objects.filter(agent=agent).exists(): | ||||
|                 InstalledSoftware(agent=agent, software=sw).save() | ||||
|             else: | ||||
|                 s = agent.installedsoftware_set.first()  # type: ignore | ||||
|                 s.software = sw | ||||
|                 s.save(update_fields=["software"]) | ||||
|  | ||||
|             return Response("ok") | ||||
|  | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|         return Response("ok") | ||||
|  | ||||
|     # called once during tacticalagent windows service startup | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         if not agent.choco_installed: | ||||
|             asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False)) | ||||
|  | ||||
|         time.sleep(0.5) | ||||
|         asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False)) | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class SyncMeshNodeID(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         if agent.mesh_node_id != request.data["nodeid"]: | ||||
|             agent.mesh_node_id = request.data["nodeid"] | ||||
|             agent.save(update_fields=["mesh_node_id"]) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class Choco(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         agent.choco_installed = request.data["installed"] | ||||
|         agent.save(update_fields=["choco_installed"]) | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class WinUpdates(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def put(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         reboot_policy: str = agent.get_patch_policy().reboot_after_install | ||||
|         reboot = False | ||||
|  | ||||
|         if reboot_policy == "always": | ||||
|             reboot = True | ||||
|  | ||||
|         if request.data["needs_reboot"]: | ||||
|             if reboot_policy == "required": | ||||
|                 reboot = True | ||||
|             elif reboot_policy == "never": | ||||
|                 agent.needs_reboot = True | ||||
|                 agent.save(update_fields=["needs_reboot"]) | ||||
|  | ||||
|         if reboot: | ||||
|             asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False)) | ||||
|             logger.info(f"{agent.hostname} is rebooting after updates were installed.") | ||||
|  | ||||
|         agent.delete_superseded_updates() | ||||
|         return Response("ok") | ||||
|  | ||||
|     def patch(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         u = agent.winupdates.filter(guid=request.data["guid"]).last()  # type: ignore | ||||
|         success: bool = request.data["success"] | ||||
|         if success: | ||||
|             u.result = "success" | ||||
|             u.downloaded = True | ||||
|             u.installed = True | ||||
|             u.date_installed = djangotime.now() | ||||
|             u.save( | ||||
|                 update_fields=[ | ||||
|                     "result", | ||||
|                     "downloaded", | ||||
|                     "installed", | ||||
|                     "date_installed", | ||||
|                 ] | ||||
|             ) | ||||
|         else: | ||||
|             u.result = "failed" | ||||
|             u.save(update_fields=["result"]) | ||||
|  | ||||
|         agent.delete_superseded_updates() | ||||
|         return Response("ok") | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         updates = request.data["wua_updates"] | ||||
|         for update in updates: | ||||
|             if agent.winupdates.filter(guid=update["guid"]).exists():  # type: ignore | ||||
|                 u = agent.winupdates.filter(guid=update["guid"]).last()  # type: ignore | ||||
|                 u.downloaded = update["downloaded"] | ||||
|                 u.installed = update["installed"] | ||||
|                 u.save(update_fields=["downloaded", "installed"]) | ||||
|             else: | ||||
|                 try: | ||||
|                     kb = "KB" + update["kb_article_ids"][0] | ||||
|                 except Exception: | ||||
|                     continue | ||||
|  | ||||
|                 WinUpdate( | ||||
|                     agent=agent, | ||||
|                     guid=update["guid"], | ||||
|                     kb=kb, | ||||
|                     title=update["title"], | ||||
|                     installed=update["installed"], | ||||
|                     downloaded=update["downloaded"], | ||||
|                     description=update["description"], | ||||
|                     severity=update["severity"], | ||||
|                     categories=update["categories"], | ||||
|                     category_ids=update["category_ids"], | ||||
|                     kb_article_ids=update["kb_article_ids"], | ||||
|                     more_info_urls=update["more_info_urls"], | ||||
|                     support_url=update["support_url"], | ||||
|                     revision_number=update["revision_number"], | ||||
|                 ).save() | ||||
|  | ||||
|         agent.delete_superseded_updates() | ||||
|  | ||||
|         # more superseded updates cleanup | ||||
|         if pyver.parse(agent.version) <= pyver.parse("1.4.2"): | ||||
|             for u in agent.winupdates.filter(  # type: ignore | ||||
|                 date_installed__isnull=True, result="failed" | ||||
|             ).exclude(installed=True): | ||||
|                 u.delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class SupersededWinUpdate(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def post(self, request): | ||||
|         agent = get_object_or_404(Agent, agent_id=request.data["agent_id"]) | ||||
|         updates = agent.winupdates.filter(guid=request.data["guid"])  # type: ignore | ||||
|         for u in updates: | ||||
|             u.delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class CheckRunner(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def get(self, request, agentid): | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
|         checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False) | ||||
| @@ -74,10 +301,6 @@ class CheckRunnerInterval(APIView): | ||||
|  | ||||
|  | ||||
| class TaskRunner(APIView): | ||||
|     """ | ||||
|     For the windows golang agent | ||||
|     """ | ||||
|  | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
| @@ -87,6 +310,7 @@ class TaskRunner(APIView): | ||||
|         return Response(TaskGOGetSerializer(task).data) | ||||
|  | ||||
|     def patch(self, request, pk, agentid): | ||||
|         from alerts.models import Alert | ||||
|         from logs.models import AuditLog | ||||
|  | ||||
|         agent = get_object_or_404(Agent, agent_id=agentid) | ||||
| @@ -98,7 +322,18 @@ class TaskRunner(APIView): | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save(last_run=djangotime.now()) | ||||
|  | ||||
|         new_task = AutomatedTask.objects.get(pk=task.pk) | ||||
|         status = "failing" if task.retcode != 0 else "passing" | ||||
|  | ||||
|         new_task: AutomatedTask = AutomatedTask.objects.get(pk=task.pk) | ||||
|         new_task.status = status | ||||
|         new_task.save() | ||||
|  | ||||
|         if status == "passing": | ||||
|             if Alert.objects.filter(assigned_task=new_task, resolved=False).exists(): | ||||
|                 Alert.handle_alert_resolve(new_task) | ||||
|         else: | ||||
|             Alert.handle_alert_failure(new_task) | ||||
|  | ||||
|         AuditLog.objects.create( | ||||
|             username=agent.hostname, | ||||
|             agent=agent.hostname, | ||||
| @@ -175,10 +410,10 @@ class NewAgent(APIView): | ||||
|         agent.salt_id = f"{agent.hostname}-{agent.pk}" | ||||
|         agent.save(update_fields=["salt_id"]) | ||||
|  | ||||
|         user = User.objects.create_user( | ||||
|         user = User.objects.create_user(  # type: ignore | ||||
|             username=request.data["agent_id"], | ||||
|             agent=agent, | ||||
|             password=User.objects.make_random_password(60), | ||||
|             password=User.objects.make_random_password(60),  # type: ignore | ||||
|         ) | ||||
|  | ||||
|         token = Token.objects.create(user=user) | ||||
| @@ -190,10 +425,6 @@ class NewAgent(APIView): | ||||
|  | ||||
|         reload_nats() | ||||
|  | ||||
|         # Generate policies for new agent | ||||
|         agent.generate_checks_from_policies() | ||||
|         agent.generate_tasks_from_policies() | ||||
|  | ||||
|         # create agent install audit record | ||||
|         AuditLog.objects.create( | ||||
|             username=request.user, | ||||
| @@ -227,7 +458,7 @@ class Software(APIView): | ||||
|         if not InstalledSoftware.objects.filter(agent=agent).exists(): | ||||
|             InstalledSoftware(agent=agent, software=sw).save() | ||||
|         else: | ||||
|             s = agent.installedsoftware_set.first() | ||||
|             s = agent.installedsoftware_set.first()  # type: ignore | ||||
|             s.software = sw | ||||
|             s.save(update_fields=["software"]) | ||||
|  | ||||
| @@ -250,3 +481,35 @@ class Installer(APIView): | ||||
|             ) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|  | ||||
| class ChocoResult(APIView): | ||||
|     authentication_classes = [TokenAuthentication] | ||||
|     permission_classes = [IsAuthenticated] | ||||
|  | ||||
|     def patch(self, request, pk): | ||||
|         action = get_object_or_404(PendingAction, pk=pk) | ||||
|         results: str = request.data["results"] | ||||
|  | ||||
|         software_name = action.details["name"].lower() | ||||
|         success = [ | ||||
|             "install", | ||||
|             "of", | ||||
|             software_name, | ||||
|             "was", | ||||
|             "successful", | ||||
|             "installed", | ||||
|         ] | ||||
|         duplicate = [software_name, "already", "installed", "--force", "reinstall"] | ||||
|         installed = False | ||||
|  | ||||
|         if all(x in results.lower() for x in success): | ||||
|             installed = True | ||||
|         elif all(x in results.lower() for x in duplicate): | ||||
|             installed = True | ||||
|  | ||||
|         action.details["output"] = results | ||||
|         action.details["installed"] = installed | ||||
|         action.status = "completed" | ||||
|         action.save(update_fields=["details", "status"]) | ||||
|         return Response("ok") | ||||
|   | ||||
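A small sketch of the version check in CheckIn.patch above, using packaging.version the same way the view does (the latest-version constant is a stand-in for settings.LATEST_AGENT_VER): a pending agentupdate action is only marked completed when the reported version moves past the stored one or already equals the latest release.

    from packaging import version as pyver

    LATEST_AGENT_VER = "1.4.3"  # stand-in for settings.LATEST_AGENT_VER

    def agent_just_updated(reported: str, stored: str) -> bool:
        # mirrors the "updated" flag computed in CheckIn.patch
        return pyver.parse(reported) > pyver.parse(stored) or pyver.parse(
            reported
        ) == pyver.parse(LATEST_AGENT_VER)

    print(agent_just_updated("1.4.3", "1.4.2"))  # True  -> pending agentupdate flips to "completed"
    print(agent_just_updated("1.3.0", "1.3.0"))  # False -> the pending action stays "pending"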
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.0.6 on 2020-06-04 17:13 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -0,0 +1,20 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 14:08 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('alerts', '0004_auto_20210212_1408'), | ||||
|         ('automation', '0006_delete_policyexclusions'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='policy', | ||||
|             name='alert_template', | ||||
|             field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='policies', to='alerts.alerttemplate'), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,4 +1,5 @@ | ||||
| from django.db import models | ||||
|  | ||||
| from agents.models import Agent | ||||
| from core.models import CoreSettings | ||||
| from logs.models import BaseAuditModel | ||||
| @@ -9,14 +10,44 @@ class Policy(BaseAuditModel): | ||||
|     desc = models.CharField(max_length=255, null=True, blank=True) | ||||
|     active = models.BooleanField(default=False) | ||||
|     enforced = models.BooleanField(default=False) | ||||
|     alert_template = models.ForeignKey( | ||||
|         "alerts.AlertTemplate", | ||||
|         related_name="policies", | ||||
|         on_delete=models.SET_NULL, | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|  | ||||
|     def save(self, *args, **kwargs): | ||||
|         from automation.tasks import generate_agent_checks_from_policies_task | ||||
|  | ||||
|         # get old policy if exists | ||||
|         old_policy = type(self).objects.get(pk=self.pk) if self.pk else None | ||||
|         super(BaseAuditModel, self).save(*args, **kwargs) | ||||
|  | ||||
|         # generate agent checks only if active and enforced were changed | ||||
|         if old_policy: | ||||
|             if old_policy.active != self.active or old_policy.enforced != self.enforced: | ||||
|                 generate_agent_checks_from_policies_task.delay( | ||||
|                     policypk=self.pk, | ||||
|                     create_tasks=True, | ||||
|                 ) | ||||
|  | ||||
|     def delete(self, *args, **kwargs): | ||||
|         from automation.tasks import generate_agent_checks_task | ||||
|  | ||||
|         agents = list(self.related_agents().only("pk").values_list("pk", flat=True)) | ||||
|         super(BaseAuditModel, self).delete(*args, **kwargs) | ||||
|  | ||||
|         generate_agent_checks_task.delay(agents, create_tasks=True) | ||||
|  | ||||
|     @property | ||||
|     def is_default_server_policy(self): | ||||
|         return self.default_server_policy.exists() | ||||
|         return self.default_server_policy.exists()  # type: ignore | ||||
|  | ||||
|     @property | ||||
|     def is_default_workstation_policy(self): | ||||
|         return self.default_workstation_policy.exists() | ||||
|         return self.default_workstation_policy.exists()  # type: ignore | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.name | ||||
| @@ -25,7 +56,7 @@ class Policy(BaseAuditModel): | ||||
|         return self.get_related("server") | self.get_related("workstation") | ||||
|  | ||||
|     def get_related(self, mon_type): | ||||
|         explicit_agents = self.agents.filter(monitoring_type=mon_type) | ||||
|         explicit_agents = self.agents.filter(monitoring_type=mon_type)  # type: ignore | ||||
|         explicit_clients = getattr(self, f"{mon_type}_clients").all() | ||||
|         explicit_sites = getattr(self, f"{mon_type}_sites").all() | ||||
|  | ||||
| @@ -57,9 +88,8 @@ class Policy(BaseAuditModel): | ||||
|  | ||||
|     @staticmethod | ||||
|     def cascade_policy_tasks(agent): | ||||
|         from autotasks.tasks import delete_win_task_schedule | ||||
|  | ||||
|         from autotasks.models import AutomatedTask | ||||
|         from autotasks.tasks import delete_win_task_schedule | ||||
|         from logs.models import PendingAction | ||||
|  | ||||
|         # List of all tasks to be applied | ||||
| @@ -122,7 +152,9 @@ class Policy(BaseAuditModel): | ||||
|             delete_win_task_schedule.delay(task.pk) | ||||
|  | ||||
|         # handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline | ||||
|         for action in agent.pendingactions.exclude(status="completed"): | ||||
|         for action in agent.pendingactions.filter(action_type="taskaction").exclude( | ||||
|             status="completed" | ||||
|         ): | ||||
|             task = AutomatedTask.objects.get(pk=action.details["task_id"]) | ||||
|             if ( | ||||
|                 task.parent_task in agent_tasks_parent_pks | ||||
|   | ||||
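A short, hypothetical sequence showing when the overridden Policy.save() above queues the check-generation task (the policy name and description are illustrative): only a change to active or enforced triggers it.

    from automation.models import Policy

    policy = Policy.objects.get(name="Workstation baseline")  # hypothetical policy

    policy.desc = "Tightened disk-space thresholds"
    policy.save()   # active/enforced unchanged -> no Celery task queued

    policy.active = not policy.active
    policy.save()   # active changed -> generate_agent_checks_from_policies_task.delay(policypk=..., create_tasks=True)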
| @@ -1,20 +1,16 @@ | ||||
| from rest_framework.serializers import ( | ||||
|     ModelSerializer, | ||||
|     SerializerMethodField, | ||||
|     StringRelatedField, | ||||
|     ReadOnlyField, | ||||
|     SerializerMethodField, | ||||
| ) | ||||
|  | ||||
| from clients.serializers import ClientSerializer, SiteSerializer | ||||
| from agents.serializers import AgentHostnameSerializer | ||||
|  | ||||
| from .models import Policy | ||||
| from agents.models import Agent | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check | ||||
| from clients.models import Client, Site | ||||
| from clients.models import Client | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .models import Policy | ||||
|  | ||||
|  | ||||
| class PolicySerializer(ModelSerializer): | ||||
|     class Meta: | ||||
| @@ -24,15 +20,11 @@ class PolicySerializer(ModelSerializer): | ||||
|  | ||||
| class PolicyTableSerializer(ModelSerializer): | ||||
|  | ||||
|     server_clients = ClientSerializer(many=True, read_only=True) | ||||
|     server_sites = SiteSerializer(many=True, read_only=True) | ||||
|     workstation_clients = ClientSerializer(many=True, read_only=True) | ||||
|     workstation_sites = SiteSerializer(many=True, read_only=True) | ||||
|     agents = AgentHostnameSerializer(many=True, read_only=True) | ||||
|     default_server_policy = ReadOnlyField(source="is_default_server_policy") | ||||
|     default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy") | ||||
|     agents_count = SerializerMethodField(read_only=True) | ||||
|     winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True) | ||||
|     alert_template = ReadOnlyField(source="alert_template.id") | ||||
|  | ||||
|     class Meta: | ||||
|         model = Policy | ||||
| @@ -78,49 +70,16 @@ class PolicyCheckSerializer(ModelSerializer): | ||||
|             "assignedtask", | ||||
|             "text_alert", | ||||
|             "email_alert", | ||||
|             "dashboard_alert", | ||||
|         ) | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class AutoTasksFieldSerializer(ModelSerializer): | ||||
|     assigned_check = PolicyCheckSerializer(read_only=True) | ||||
|     script = ReadOnlyField(source="script.id") | ||||
|  | ||||
|     class Meta: | ||||
|         model = AutomatedTask | ||||
|         fields = ("id", "enabled", "name", "schedule", "assigned_check") | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class AutoTaskPolicySerializer(ModelSerializer): | ||||
|  | ||||
|     autotasks = AutoTasksFieldSerializer(many=True, read_only=True) | ||||
|  | ||||
|     class Meta: | ||||
|         model = Policy | ||||
|         fields = ( | ||||
|             "id", | ||||
|             "name", | ||||
|             "autotasks", | ||||
|         ) | ||||
|         depth = 2 | ||||
|  | ||||
|  | ||||
| class RelatedClientPolicySerializer(ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Client | ||||
|         fields = ("workstation_policy", "server_policy") | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class RelatedSitePolicySerializer(ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Site | ||||
|         fields = ("workstation_policy", "server_policy") | ||||
|         depth = 1 | ||||
|  | ||||
|  | ||||
| class RelatedAgentPolicySerializer(ModelSerializer): | ||||
|     class Meta: | ||||
|         model = Agent | ||||
|         fields = ("policy",) | ||||
|         fields = "__all__" | ||||
|         depth = 1 | ||||
|   | ||||
| @@ -1,11 +1,12 @@ | ||||
| from automation.models import Policy | ||||
| from checks.models import Check | ||||
| from agents.models import Agent | ||||
|  | ||||
| from automation.models import Policy | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check | ||||
| from tacticalrmm.celery import app | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # generates policy checks on agents affected by a policy and optionally generates automated tasks | ||||
| def generate_agent_checks_from_policies_task(policypk, create_tasks=False): | ||||
|  | ||||
|     policy = Policy.objects.get(pk=policypk) | ||||
| @@ -21,7 +22,7 @@ def generate_agent_checks_from_policies_task(policypk, create_tasks=False): | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     else: | ||||
|         agents = policy.related_agents() | ||||
|         agents = policy.related_agents().only("pk") | ||||
|  | ||||
|     for agent in agents: | ||||
|         agent.generate_checks_from_policies() | ||||
| @@ -30,6 +31,17 @@ def generate_agent_checks_from_policies_task(policypk, create_tasks=False): | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # generates policy checks on a list of agents and optionally generates automated tasks | ||||
| def generate_agent_checks_task(agentpks, create_tasks=False): | ||||
|     for agent in Agent.objects.filter(pk__in=agentpks): | ||||
|         agent.generate_checks_from_policies() | ||||
|  | ||||
|         if create_tasks: | ||||
|             agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # generates policy checks on agent servers or workstations within a certain client or site and optionally generates automated tasks | ||||
| def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False): | ||||
|  | ||||
|     for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type): | ||||
| @@ -40,6 +52,7 @@ def generate_agent_checks_by_location_task(location, mon_type, create_tasks=Fals | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # generates policy checks on all agent servers or workstations and optionally generates automated tasks | ||||
| def generate_all_agent_checks_task(mon_type, create_tasks=False): | ||||
|     for agent in Agent.objects.filter(monitoring_type=mon_type): | ||||
|         agent.generate_checks_from_policies() | ||||
| @@ -49,22 +62,30 @@ def generate_all_agent_checks_task(mon_type, create_tasks=False): | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # deletes a policy managed check from all agents | ||||
| def delete_policy_check_task(checkpk): | ||||
|  | ||||
|     Check.objects.filter(parent_check=checkpk).delete() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # updates policy managed check fields on agents | ||||
| def update_policy_check_fields_task(checkpk): | ||||
|  | ||||
|     check = Check.objects.get(pk=checkpk) | ||||
|  | ||||
|     Check.objects.filter(parent_check=checkpk).update( | ||||
|         threshold=check.threshold, | ||||
|         warning_threshold=check.warning_threshold, | ||||
|         error_threshold=check.error_threshold, | ||||
|         alert_severity=check.alert_severity, | ||||
|         name=check.name, | ||||
|         disk=check.disk, | ||||
|         fails_b4_alert=check.fails_b4_alert, | ||||
|         ip=check.ip, | ||||
|         script=check.script, | ||||
|         script_args=check.script_args, | ||||
|         info_return_codes=check.info_return_codes, | ||||
|         warning_return_codes=check.warning_return_codes, | ||||
|         timeout=check.timeout, | ||||
|         pass_if_start_pending=check.pass_if_start_pending, | ||||
|         pass_if_svc_not_exist=check.pass_if_svc_not_exist, | ||||
| @@ -79,10 +100,12 @@ def update_policy_check_fields_task(checkpk): | ||||
|         search_last_days=check.search_last_days, | ||||
|         email_alert=check.email_alert, | ||||
|         text_alert=check.text_alert, | ||||
|         dashboard_alert=check.dashboard_alert, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| # generates policy tasks on agents affected by a policy | ||||
| def generate_agent_tasks_from_policies_task(policypk): | ||||
|  | ||||
|     policy = Policy.objects.get(pk=policypk) | ||||
| @@ -98,23 +121,16 @@ def generate_agent_tasks_from_policies_task(policypk): | ||||
|             "pk", "monitoring_type" | ||||
|         ) | ||||
|     else: | ||||
|         agents = policy.related_agents() | ||||
|         agents = policy.related_agents().only("pk") | ||||
|  | ||||
|     for agent in agents: | ||||
|         agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def generate_agent_tasks_by_location_task(location, mon_type): | ||||
|  | ||||
|     for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type): | ||||
|         agent.generate_tasks_from_policies() | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def delete_policy_autotask_task(taskpk): | ||||
|     from autotasks.tasks import delete_win_task_schedule | ||||
|     from autotasks.models import AutomatedTask | ||||
|     from autotasks.tasks import delete_win_task_schedule | ||||
|  | ||||
|     for task in AutomatedTask.objects.filter(parent_task=taskpk): | ||||
|         delete_win_task_schedule.delay(task.pk) | ||||
| @@ -129,13 +145,23 @@ def run_win_policy_autotask_task(task_pks): | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def update_policy_task_fields_task(taskpk, enabled): | ||||
|     from autotasks.models import AutomatedTask | ||||
| def update_policy_task_fields_task(taskpk, update_agent=False): | ||||
|     from autotasks.tasks import enable_or_disable_win_task | ||||
|  | ||||
|     tasks = AutomatedTask.objects.filter(parent_task=taskpk) | ||||
|     task = AutomatedTask.objects.get(pk=taskpk) | ||||
|  | ||||
|     tasks.update(enabled=enabled) | ||||
|     AutomatedTask.objects.filter(parent_task=taskpk).update( | ||||
|         alert_severity=task.alert_severity, | ||||
|         email_alert=task.email_alert, | ||||
|         text_alert=task.text_alert, | ||||
|         dashboard_alert=task.dashboard_alert, | ||||
|         script=task.script, | ||||
|         script_args=task.script_args, | ||||
|         name=task.name, | ||||
|         timeout=task.timeout, | ||||
|         enabled=task.enabled, | ||||
|     ) | ||||
|  | ||||
|     for autotask in tasks: | ||||
|         enable_or_disable_win_task(autotask.pk, enabled) | ||||
|     if update_agent: | ||||
|         for task in AutomatedTask.objects.filter(parent_task=taskpk): | ||||
|             enable_or_disable_win_task.delay(task.pk, task.enabled) | ||||
|   | ||||
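Hypothetical invocation of the reworked task above (the primary key is made up): it copies the parent policy task's fields down to every agent copy and, with update_agent=True, also pushes each copy's enabled state out to the agents.

    from automation.tasks import update_policy_task_fields_task

    update_policy_task_fields_task.delay(taskpk=42, update_agent=True)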
| @@ -1,21 +1,20 @@ | ||||
| from unittest.mock import patch | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from model_bakery import baker, seq | ||||
| from itertools import cycle | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from model_bakery import baker, seq | ||||
|  | ||||
| from agents.models import Agent | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from winupdate.models import WinUpdatePolicy | ||||
|  | ||||
| from .serializers import ( | ||||
|     PolicyTableSerializer, | ||||
|     PolicySerializer, | ||||
|     PolicyTaskStatusSerializer, | ||||
|     AutoTaskPolicySerializer, | ||||
|     PolicyOverviewSerializer, | ||||
|     PolicyCheckStatusSerializer, | ||||
|     AutoTasksFieldSerializer, | ||||
|     PolicyCheckSerializer, | ||||
|     RelatedAgentPolicySerializer, | ||||
|     RelatedSitePolicySerializer, | ||||
|     RelatedClientPolicySerializer, | ||||
|     PolicyCheckStatusSerializer, | ||||
|     PolicyOverviewSerializer, | ||||
|     PolicySerializer, | ||||
|     PolicyTableSerializer, | ||||
|     PolicyTaskStatusSerializer, | ||||
| ) | ||||
|  | ||||
|  | ||||
| @@ -91,7 +90,7 @@ class TestPolicyViews(TacticalTestCase): | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     @patch("automation.tasks.generate_agent_checks_from_policies_task.delay") | ||||
|     def test_update_policy(self, mock_checks_task): | ||||
|     def test_update_policy(self, generate_agent_checks_from_policies_task): | ||||
|         # returns 404 for invalid policy pk | ||||
|         resp = self.client.put("/automation/policies/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
| @@ -110,7 +109,7 @@ class TestPolicyViews(TacticalTestCase): | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # only called if active or enforced are updated | ||||
|         mock_checks_task.assert_not_called() | ||||
|         generate_agent_checks_from_policies_task.assert_not_called() | ||||
|  | ||||
|         data = { | ||||
|             "name": "Test Policy Update", | ||||
| @@ -121,40 +120,43 @@ class TestPolicyViews(TacticalTestCase): | ||||
|  | ||||
|         resp = self.client.put(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         mock_checks_task.assert_called_with(policypk=policy.pk, create_tasks=True) | ||||
|         generate_agent_checks_from_policies_task.assert_called_with( | ||||
|             policypk=policy.pk, create_tasks=True | ||||
|         ) | ||||
|  | ||||
|         self.check_not_authenticated("put", url) | ||||
|  | ||||
|     @patch("automation.tasks.generate_agent_checks_from_policies_task.delay") | ||||
|     @patch("automation.tasks.generate_agent_tasks_from_policies_task.delay") | ||||
|     def test_delete_policy(self, mock_tasks_task, mock_checks_task): | ||||
|     @patch("automation.tasks.generate_agent_checks_task.delay") | ||||
|     def test_delete_policy(self, generate_agent_checks_task): | ||||
|         # returns 404 for invalid policy pk | ||||
|         resp = self.client.delete("/automation/policies/500/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy") | ||||
|         site = baker.make("clients.Site") | ||||
|         agents = baker.make_recipe( | ||||
|             "agents.agent", site=site, policy=policy, _quantity=3 | ||||
|         ) | ||||
|         url = f"/automation/policies/{policy.pk}/" | ||||
|  | ||||
|         resp = self.client.delete(url, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         mock_checks_task.assert_called_with(policypk=policy.pk) | ||||
|         mock_tasks_task.assert_called_with(policypk=policy.pk) | ||||
|         generate_agent_checks_task.assert_called_with( | ||||
|             [agent.pk for agent in agents], create_tasks=True | ||||
|         ) | ||||
|  | ||||
|         self.check_not_authenticated("delete", url) | ||||
|  | ||||
|     def test_get_all_policy_tasks(self): | ||||
|         # returns 404 for invalid policy pk | ||||
|         resp = self.client.get("/automation/500/policyautomatedtasks/", format="json") | ||||
|         self.assertEqual(resp.status_code, 404) | ||||
|  | ||||
|         # create policy with tasks | ||||
|         policy = baker.make("automation.Policy") | ||||
|         baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) | ||||
|         tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) | ||||
|         url = f"/automation/{policy.pk}/policyautomatedtasks/" | ||||
|  | ||||
|         resp = self.client.get(url, format="json") | ||||
|         serializer = AutoTaskPolicySerializer(policy) | ||||
|         serializer = AutoTasksFieldSerializer(tasks, many=True) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
| @@ -180,8 +182,9 @@ class TestPolicyViews(TacticalTestCase): | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_get_policy_check_status(self): | ||||
|         # set data | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
|         # setup data | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.agent", site=site) | ||||
|         policy = baker.make("automation.Policy") | ||||
|         policy_diskcheck = baker.make_recipe("checks.diskspace_check", policy=policy) | ||||
|         managed_check = baker.make_recipe( | ||||
| @@ -246,266 +249,6 @@ class TestPolicyViews(TacticalTestCase): | ||||
|  | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     @patch("agents.models.Agent.generate_checks_from_policies") | ||||
|     @patch("automation.tasks.generate_agent_checks_by_location_task.delay") | ||||
|     def test_update_policy_add( | ||||
|         self, | ||||
|         mock_checks_location_task, | ||||
|         mock_checks_task, | ||||
|     ): | ||||
|         url = f"/automation/related/" | ||||
|  | ||||
|         # data setup | ||||
|         policy = baker.make("automation.Policy") | ||||
|         client = baker.make("clients.Client") | ||||
|         site = baker.make("clients.Site", client=client) | ||||
|         agent = baker.make_recipe("agents.agent", site=site) | ||||
|  | ||||
|         # test add client to policy data | ||||
|         client_server_payload = { | ||||
|             "type": "client", | ||||
|             "pk": agent.client.pk, | ||||
|             "server_policy": policy.pk, | ||||
|         } | ||||
|         client_workstation_payload = { | ||||
|             "type": "client", | ||||
|             "pk": agent.client.pk, | ||||
|             "workstation_policy": policy.pk, | ||||
|         } | ||||
|  | ||||
|         # test add site to policy data | ||||
|         site_server_payload = { | ||||
|             "type": "site", | ||||
|             "pk": agent.site.pk, | ||||
|             "server_policy": policy.pk, | ||||
|         } | ||||
|         site_workstation_payload = { | ||||
|             "type": "site", | ||||
|             "pk": agent.site.pk, | ||||
|             "workstation_policy": policy.pk, | ||||
|         } | ||||
|  | ||||
|         # test add agent to policy data | ||||
|         agent_payload = {"type": "agent", "pk": agent.pk, "policy": policy.pk} | ||||
|  | ||||
|         # test client server policy add | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test client workstation policy add | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site add server policy | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site add workstation policy | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test agent add | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_task.assert_called() | ||||
|         mock_checks_task.reset_mock() | ||||
|  | ||||
|         # Adding the same relations shouldn't trigger mocks | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # shouldn't be called since nothing changed | ||||
|         mock_checks_task.assert_not_called() | ||||
|  | ||||
|         # test remove client from policy data | ||||
|         client_server_payload = {"type": "client", "pk": client.pk, "server_policy": 0} | ||||
|         client_workstation_payload = { | ||||
|             "type": "client", | ||||
|             "pk": client.pk, | ||||
|             "workstation_policy": 0, | ||||
|         } | ||||
|  | ||||
|         # test remove site from policy data | ||||
|         site_server_payload = {"type": "site", "pk": site.pk, "server_policy": 0} | ||||
|         site_workstation_payload = { | ||||
|             "type": "site", | ||||
|             "pk": site.pk, | ||||
|             "workstation_policy": 0, | ||||
|         } | ||||
|  | ||||
|         # test remove agent from policy | ||||
|         agent_payload = {"type": "agent", "pk": agent.pk, "policy": 0} | ||||
|  | ||||
|         # test client server policy remove | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test client workstation policy remove | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site__client_id": client.id}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site remove server policy | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test site remove workstation policy | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # called because the relation changed | ||||
|         mock_checks_location_task.assert_called_with( | ||||
|             location={"site_id": site.id}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         mock_checks_location_task.reset_mock() | ||||
|  | ||||
|         # test agent remove | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         # called because the relation changed | ||||
|         mock_checks_task.assert_called() | ||||
|         mock_checks_task.reset_mock() | ||||
|  | ||||
|         # adding the same relations shouldn't trigger mocks | ||||
|         resp = self.client.post(url, client_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         resp = self.client.post(url, client_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # shouldn't be called since nothing changed | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, site_server_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         resp = self.client.post(url, site_workstation_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # shouldn't be called since nothing changed | ||||
|         mock_checks_location_task.assert_not_called() | ||||
|  | ||||
|         resp = self.client.post(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # shouldn't be called since nothing changed | ||||
|         mock_checks_task.assert_not_called() | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_get_relation_by_type(self): | ||||
|         url = f"/automation/related/" | ||||
|  | ||||
|         # data setup | ||||
|         policy = baker.make("automation.Policy") | ||||
|         client = baker.make("clients.Client", workstation_policy=policy) | ||||
|         site = baker.make("clients.Site", server_policy=policy) | ||||
|         agent = baker.make_recipe("agents.agent", site=site, policy=policy) | ||||
|  | ||||
|         client_payload = {"type": "client", "pk": client.pk} | ||||
|  | ||||
|         # site relation payload | ||||
|         site_payload = {"type": "site", "pk": site.pk} | ||||
|  | ||||
|         # agent relation payload | ||||
|         agent_payload = {"type": "agent", "pk": agent.pk} | ||||
|  | ||||
|         # test client relation get | ||||
|         serializer = RelatedClientPolicySerializer(client) | ||||
|         resp = self.client.patch(url, client_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         # test site relation get | ||||
|         serializer = RelatedSitePolicySerializer(site) | ||||
|         resp = self.client.patch(url, site_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         # test agent relation get | ||||
|         serializer = RelatedAgentPolicySerializer(agent) | ||||
|         resp = self.client.patch(url, agent_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|  | ||||
|         invalid_payload = {"type": "bad_type", "pk": 5} | ||||
|  | ||||
|         resp = self.client.patch(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|     def test_get_policy_task_status(self): | ||||
|  | ||||
|         # policy with a task | ||||
| @@ -739,8 +482,7 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         checks = self.create_checks(policy=policy) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.agent", site=site, policy=policy) | ||||
|         agent = baker.make_recipe("agents.agent", policy=policy) | ||||
|  | ||||
|         # test policy assigned to agent | ||||
|         generate_agent_checks_from_policies_task(policy.id) | ||||
| @@ -756,16 +498,19 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|             if check.check_type == "diskspace": | ||||
|                 self.assertEqual(check.parent_check, checks[0].id) | ||||
|                 self.assertEqual(check.disk, checks[0].disk) | ||||
|                 self.assertEqual(check.threshold, checks[0].threshold) | ||||
|                 self.assertEqual(check.error_threshold, checks[0].error_threshold) | ||||
|                 self.assertEqual(check.warning_threshold, checks[0].warning_threshold) | ||||
|             elif check.check_type == "ping": | ||||
|                 self.assertEqual(check.parent_check, checks[1].id) | ||||
|                 self.assertEqual(check.ip, checks[1].ip) | ||||
|             elif check.check_type == "cpuload": | ||||
|                 self.assertEqual(check.parent_check, checks[2].id) | ||||
|                 self.assertEqual(check.threshold, checks[2].threshold) | ||||
|                 self.assertEqual(check.error_threshold, checks[2].error_threshold) | ||||
|                 self.assertEqual(check.warning_threshold, checks[2].warning_threshold) | ||||
|             elif check.check_type == "memory": | ||||
|                 self.assertEqual(check.parent_check, checks[3].id) | ||||
|                 self.assertEqual(check.threshold, checks[3].threshold) | ||||
|                 self.assertEqual(check.error_threshold, checks[3].error_threshold) | ||||
|                 self.assertEqual(check.warning_threshold, checks[3].warning_threshold) | ||||
|             elif check.check_type == "winsvc": | ||||
|                 self.assertEqual(check.parent_check, checks[4].id) | ||||
|                 self.assertEqual(check.svc_name, checks[4].svc_name) | ||||
| @@ -801,69 +546,246 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|             7, | ||||
|         ) | ||||
|  | ||||
|     def test_generating_agent_policy_checks_by_location(self): | ||||
|         from .tasks import generate_agent_checks_by_location_task | ||||
|     @patch("automation.tasks.generate_agent_checks_by_location_task.delay") | ||||
|     def test_generating_agent_policy_checks_by_location( | ||||
|         self, generate_agent_checks_by_location_task | ||||
|     ): | ||||
|         from automation.tasks import ( | ||||
|             generate_agent_checks_by_location_task as generate_agent_checks, | ||||
|         ) | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|         clients = baker.make( | ||||
|             "clients.Client", | ||||
|             _quantity=2, | ||||
|             server_policy=policy, | ||||
|             workstation_policy=policy, | ||||
|         ) | ||||
|         sites = baker.make("clients.Site", client=cycle(clients), _quantity=4) | ||||
|         server_agent = baker.make_recipe("agents.server_agent", site=sites[0]) | ||||
|         workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2]) | ||||
|         agent1 = baker.make_recipe("agents.server_agent", site=sites[1]) | ||||
|         agent2 = baker.make_recipe("agents.workstation_agent", site=sites[3]) | ||||
|  | ||||
|         generate_agent_checks_by_location_task( | ||||
|             {"site_id": sites[0].id}, | ||||
|             "server", | ||||
|         baker.make( | ||||
|             "autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3 | ||||
|         ) | ||||
|  | ||||
|         server_agent = baker.make_recipe("agents.server_agent") | ||||
|         workstation_agent = baker.make_recipe("agents.workstation_agent") | ||||
|  | ||||
|         # no checks should be present on agents | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|         # set workstation policy on client and policy checks should be there | ||||
|         workstation_agent.client.workstation_policy = policy | ||||
|         workstation_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # server_agent should have policy checks and the other agents should not | ||||
|         # make sure the checks were added | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         # remove workstation policy from client | ||||
|         workstation_agent.client.workstation_policy = None | ||||
|         workstation_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": workstation_agent.client.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure the checks were removed | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         # set server policy on client and policy checks should be there | ||||
|         server_agent.client.server_policy = policy | ||||
|         server_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were added | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0) | ||||
|  | ||||
|         generate_agent_checks_by_location_task( | ||||
|             {"site__client_id": clients[0].id}, | ||||
|             "workstation", | ||||
|         # remove server policy from client | ||||
|         server_agent.client.server_policy = None | ||||
|         server_agent.client.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         # workstation_agent should now have policy checks and the other agents should not | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site__client_id": server_agent.client.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were removed | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|         # set workstation policy on site and policy checks should be there | ||||
|         workstation_agent.site.workstation_policy = policy | ||||
|         workstation_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were added on workstation | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|     def test_generating_policy_checks_for_all_agents(self): | ||||
|         from .tasks import generate_all_agent_checks_task | ||||
|         # remove workstation policy from site | ||||
|         workstation_agent.site.workstation_policy = None | ||||
|         workstation_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": workstation_agent.site.pk}, | ||||
|             mon_type="workstation", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were removed | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         # set server policy on site and policy checks should be there | ||||
|         server_agent.site.server_policy = policy | ||||
|         server_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were added | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|         # remove server policy from site | ||||
|         server_agent.site.server_policy = None | ||||
|         server_agent.site.save() | ||||
|  | ||||
|         # should trigger task in save method on core | ||||
|         generate_agent_checks_by_location_task.assert_called_with( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|         generate_agent_checks_by_location_task.reset_mock() | ||||
|  | ||||
|         generate_agent_checks( | ||||
|             location={"site_id": server_agent.site.pk}, | ||||
|             mon_type="server", | ||||
|             create_tasks=True, | ||||
|         ) | ||||
|  | ||||
|         # make sure checks were removed | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0) | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0 | ||||
|         ) | ||||
|  | ||||
|     @patch("automation.tasks.generate_all_agent_checks_task.delay") | ||||
|     def test_generating_policy_checks_for_all_agents( | ||||
|         self, generate_all_agent_checks_task | ||||
|     ): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         from .tasks import generate_all_agent_checks_task as generate_all_checks | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|  | ||||
|         site = baker.make("clients.Site") | ||||
|         server_agents = baker.make_recipe("agents.server_agent", site=site, _quantity=3) | ||||
|         workstation_agents = baker.make_recipe( | ||||
|             "agents.workstation_agent", site=site, _quantity=4 | ||||
|         ) | ||||
|         server_agents = baker.make_recipe("agents.server_agent", _quantity=3) | ||||
|         workstation_agents = baker.make_recipe("agents.workstation_agent", _quantity=4) | ||||
|         core = CoreSettings.objects.first() | ||||
|         core.server_policy = policy | ||||
|         core.workstation_policy = policy | ||||
|         core.save() | ||||
|  | ||||
|         generate_all_agent_checks_task("server", create_tasks=True) | ||||
|         generate_all_agent_checks_task.assert_called_with( | ||||
|             mon_type="server", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.reset_mock() | ||||
|         generate_all_checks(mon_type="server", create_tasks=True) | ||||
|  | ||||
|         # all servers should have 7 checks | ||||
|         for agent in server_agents: | ||||
| @@ -872,24 +794,50 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         for agent in workstation_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         generate_all_agent_checks_task("workstation", create_tasks=True) | ||||
|         core.server_policy = None | ||||
|         core.workstation_policy = policy | ||||
|         core.save() | ||||
|  | ||||
|         # all agents should have 7 checks now | ||||
|         generate_all_agent_checks_task.assert_any_call( | ||||
|             mon_type="workstation", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.assert_any_call( | ||||
|             mon_type="server", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.reset_mock() | ||||
|         generate_all_checks(mon_type="server", create_tasks=True) | ||||
|         generate_all_checks(mon_type="workstation", create_tasks=True) | ||||
|  | ||||
|         # all workstations should have 7 checks | ||||
|         for agent in server_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         for agent in workstation_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
|  | ||||
|         core.workstation_policy = None | ||||
|         core.save() | ||||
|  | ||||
|         generate_all_agent_checks_task.assert_called_with( | ||||
|             mon_type="workstation", create_tasks=True | ||||
|         ) | ||||
|         generate_all_agent_checks_task.reset_mock() | ||||
|         generate_all_checks(mon_type="workstation", create_tasks=True) | ||||
|  | ||||
|         # nothing should have the checks | ||||
|         for agent in server_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|         for agent in workstation_agents: | ||||
|             self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0) | ||||
|  | ||||
|     def test_delete_policy_check(self): | ||||
|         from .tasks import delete_policy_check_task | ||||
|         from .models import Policy | ||||
|         from .tasks import delete_policy_check_task | ||||
|  | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent.generate_checks_from_policies() | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         # make sure agent has 7 checks | ||||
|         self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
| @@ -908,13 +856,12 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         ) | ||||
|  | ||||
|     def update_policy_check_fields(self): | ||||
|         from .tasks import update_policy_check_fields_task | ||||
|         from .models import Policy | ||||
|         from .tasks import update_policy_check_fields_task | ||||
|  | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         self.create_checks(policy=policy) | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|         agent.generate_checks_from_policies() | ||||
|  | ||||
|         # make sure agent has 7 checks | ||||
|         self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7) | ||||
| @@ -946,8 +893,7 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         tasks = baker.make( | ||||
|             "autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3 | ||||
|         ) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         generate_agent_tasks_from_policies_task(policy.id) | ||||
|  | ||||
| @@ -968,61 +914,19 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|                 self.assertEqual(task.parent_task, tasks[2].id) | ||||
|                 self.assertEqual(task.name, tasks[2].name) | ||||
|  | ||||
|     def test_generate_agent_tasks_by_location(self): | ||||
|         from .tasks import generate_agent_tasks_by_location_task | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         baker.make( | ||||
|             "autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3 | ||||
|         ) | ||||
|         clients = baker.make( | ||||
|             "clients.Client", | ||||
|             _quantity=2, | ||||
|             server_policy=policy, | ||||
|             workstation_policy=policy, | ||||
|         ) | ||||
|         sites = baker.make("clients.Site", client=cycle(clients), _quantity=4) | ||||
|         server_agent = baker.make_recipe("agents.server_agent", site=sites[0]) | ||||
|         workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2]) | ||||
|         agent1 = baker.make_recipe("agents.agent", site=sites[1]) | ||||
|         agent2 = baker.make_recipe("agents.agent", site=sites[3]) | ||||
|  | ||||
|         generate_agent_tasks_by_location_task({"site_id": sites[0].id}, "server") | ||||
|  | ||||
|         # only servers in the first site should have 3 tasks | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 0 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0) | ||||
|  | ||||
|         generate_agent_tasks_by_location_task( | ||||
|             {"site__client_id": clients[0].id}, "workstation" | ||||
|         ) | ||||
|  | ||||
|         # all workstations under the first client should have 3 tasks | ||||
|         self.assertEqual( | ||||
|             Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 3 | ||||
|         ) | ||||
|         self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0) | ||||
|         self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0) | ||||
|  | ||||
|     @patch("autotasks.tasks.delete_win_task_schedule.delay") | ||||
|     def test_delete_policy_tasks(self, delete_win_task_schedule): | ||||
|         from .tasks import delete_policy_autotask_task | ||||
|  | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent.generate_tasks_from_policies() | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         delete_policy_autotask_task(tasks[0].id) | ||||
|  | ||||
|         delete_win_task_schedule.assert_called_with(agent.autotasks.first().id) | ||||
|         delete_win_task_schedule.assert_called_with( | ||||
|             agent.autotasks.get(parent_task=tasks[0].id).id | ||||
|         ) | ||||
|  | ||||
|     @patch("autotasks.tasks.run_win_task.delay") | ||||
|     def test_run_policy_task(self, run_win_task): | ||||
| @@ -1037,25 +941,46 @@ class TestPolicyTasks(TacticalTestCase): | ||||
|         for task in tasks: | ||||
|             run_win_task.assert_any_call(task.id) | ||||
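|  | ||||
|     # For reference, a hedged sketch of run_win_policy_autotask_task as exercised above; | ||||
|     # the body is inferred from this test and is an assumption, not the repo's implementation: | ||||
|     # | ||||
|     #     @app.task | ||||
|     #     def run_win_policy_autotask_task(task_pks): | ||||
|     #         from autotasks.tasks import run_win_task | ||||
|     # | ||||
|     #         # queue an immediate run for each policy-generated agent task pk | ||||
|     #         for pk in task_pks: | ||||
|     #             run_win_task.delay(pk) | ||||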
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_update_policy_tasks(self, nats_cmd): | ||||
|     @patch("autotasks.tasks.enable_or_disable_win_task.delay") | ||||
|     def test_update_policy_tasks(self, enable_or_disable_win_task): | ||||
|         from .tasks import update_policy_task_fields_task | ||||
|         from autotasks.models import AutomatedTask | ||||
|  | ||||
|         nats_cmd.return_value = "ok" | ||||
|  | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy", active=True) | ||||
|         tasks = baker.make( | ||||
|             "autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3 | ||||
|         ) | ||||
|         site = baker.make("clients.Site") | ||||
|         agent = baker.make_recipe("agents.server_agent", site=site, policy=policy) | ||||
|         agent.generate_tasks_from_policies() | ||||
|         agent = baker.make_recipe("agents.server_agent", policy=policy) | ||||
|  | ||||
|         tasks[0].enabled = False | ||||
|         tasks[0].save() | ||||
|  | ||||
|         update_policy_task_fields_task(tasks[0].id, enabled=False) | ||||
|         update_policy_task_fields_task(tasks[0].id) | ||||
|         enable_or_disable_win_task.assert_not_called() | ||||
|  | ||||
|         self.assertFalse(AutomatedTask.objects.get(parent_task=tasks[0].id).enabled) | ||||
|         self.assertFalse(agent.autotasks.get(parent_task=tasks[0].id).enabled) | ||||
|  | ||||
|         update_policy_task_fields_task(tasks[0].id, update_agent=True) | ||||
|         enable_or_disable_win_task.assert_called_with( | ||||
|             agent.autotasks.get(parent_task=tasks[0].id).id, False | ||||
|         ) | ||||
|  | ||||
|     @patch("agents.models.Agent.generate_tasks_from_policies") | ||||
|     @patch("agents.models.Agent.generate_checks_from_policies") | ||||
|     def test_generate_agent_checks_with_agentpks(self, generate_checks, generate_tasks): | ||||
|         from automation.tasks import generate_agent_checks_task | ||||
|  | ||||
|         agents = baker.make_recipe("agents.agent", _quantity=5) | ||||
|  | ||||
|         # reset because creating agents triggers it | ||||
|         generate_checks.reset_mock() | ||||
|         generate_tasks.reset_mock() | ||||
|  | ||||
|         generate_agent_checks_task([agent.pk for agent in agents]) | ||||
|         self.assertEquals(generate_checks.call_count, 5) | ||||
|         generate_tasks.assert_not_called() | ||||
|         generate_checks.reset_mock() | ||||
|  | ||||
|         generate_agent_checks_task([agent.pk for agent in agents], create_tasks=True) | ||||
|         self.assertEquals(generate_checks.call_count, 5) | ||||
|         self.assertEquals(generate_tasks.call_count, 5) | ||||
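|  | ||||
|     # For reference, a hedged sketch of the new generate_agent_checks_task used above; | ||||
|     # the body is inferred from this test's expectations and is an assumption, not the repo's code: | ||||
|     # | ||||
|     #     @app.task | ||||
|     #     def generate_agent_checks_task(agentpks, create_tasks=False): | ||||
|     #         for agent in Agent.objects.filter(pk__in=agentpks): | ||||
|     #             agent.generate_checks_from_policies() | ||||
|     # | ||||
|     #             if create_tasks: | ||||
|     #                 agent.generate_tasks_from_policies() | ||||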
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
|     path("policies/", views.GetAddPolicies.as_view()), | ||||
|     path("policies/<int:pk>/related/", views.GetRelated.as_view()), | ||||
|     path("related/", views.GetRelated.as_view()), | ||||
|     path("policies/overview/", views.OverviewPolicy.as_view()), | ||||
|     path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()), | ||||
|     path("<int:pk>/policychecks/", views.PolicyCheck.as_view()), | ||||
|   | ||||
| @@ -1,39 +1,27 @@ | ||||
| from django.shortcuts import get_object_or_404 | ||||
|  | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.response import Response | ||||
| from rest_framework import status | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from .models import Policy | ||||
| from agents.models import Agent | ||||
| from clients.models import Client, Site | ||||
| from checks.models import Check | ||||
| from autotasks.models import AutomatedTask | ||||
| from winupdate.models import WinUpdatePolicy | ||||
|  | ||||
| from clients.serializers import ClientSerializer, SiteSerializer | ||||
| from agents.serializers import AgentHostnameSerializer | ||||
| from autotasks.models import AutomatedTask | ||||
| from checks.models import Check | ||||
| from clients.models import Client | ||||
| from clients.serializers import ClientSerializer, SiteSerializer | ||||
| from winupdate.models import WinUpdatePolicy | ||||
| from winupdate.serializers import WinUpdatePolicySerializer | ||||
|  | ||||
| from .models import Policy | ||||
| from .serializers import ( | ||||
|     AutoTasksFieldSerializer, | ||||
|     PolicyCheckSerializer, | ||||
|     PolicyCheckStatusSerializer, | ||||
|     PolicyOverviewSerializer, | ||||
|     PolicySerializer, | ||||
|     PolicyTableSerializer, | ||||
|     PolicyOverviewSerializer, | ||||
|     PolicyCheckStatusSerializer, | ||||
|     PolicyCheckSerializer, | ||||
|     PolicyTaskStatusSerializer, | ||||
|     AutoTaskPolicySerializer, | ||||
|     RelatedClientPolicySerializer, | ||||
|     RelatedSitePolicySerializer, | ||||
|     RelatedAgentPolicySerializer, | ||||
| ) | ||||
|  | ||||
| from .tasks import ( | ||||
|     generate_agent_checks_from_policies_task, | ||||
|     generate_agent_checks_by_location_task, | ||||
|     generate_agent_tasks_from_policies_task, | ||||
|     run_win_policy_autotask_task, | ||||
| ) | ||||
| from .tasks import run_win_policy_autotask_task | ||||
|  | ||||
|  | ||||
| class GetAddPolicies(APIView): | ||||
| @@ -72,29 +60,14 @@ class GetUpdateDeletePolicy(APIView): | ||||
|     def put(self, request, pk): | ||||
|         policy = get_object_or_404(Policy, pk=pk) | ||||
|  | ||||
|         old_active = policy.active | ||||
|         old_enforced = policy.enforced | ||||
|  | ||||
|         serializer = PolicySerializer(instance=policy, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         saved_policy = serializer.save() | ||||
|  | ||||
|         # Generate agent checks only if active and enforced were changed | ||||
|         if saved_policy.active != old_active or saved_policy.enforced != old_enforced: | ||||
|             generate_agent_checks_from_policies_task.delay( | ||||
|                 policypk=policy.pk, | ||||
|                 create_tasks=(saved_policy.active != old_active), | ||||
|             ) | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def delete(self, request, pk): | ||||
|         policy = get_object_or_404(Policy, pk=pk) | ||||
|  | ||||
|         # delete all managed policy checks off of agents | ||||
|         generate_agent_checks_from_policies_task.delay(policypk=policy.pk) | ||||
|         generate_agent_tasks_from_policies_task.delay(policypk=policy.pk) | ||||
|         policy.delete() | ||||
|         get_object_or_404(Policy, pk=pk).delete() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
| @@ -103,8 +76,8 @@ class PolicyAutoTask(APIView): | ||||
|  | ||||
|     # tasks associated with policy | ||||
|     def get(self, request, pk): | ||||
|         policy = get_object_or_404(Policy, pk=pk) | ||||
|         return Response(AutoTaskPolicySerializer(policy).data) | ||||
|         tasks = AutomatedTask.objects.filter(policy=pk) | ||||
|         return Response(AutoTasksFieldSerializer(tasks, many=True).data) | ||||
|  | ||||
|     # get status of all tasks | ||||
|     def patch(self, request, task): | ||||
| @@ -183,205 +156,12 @@ class GetRelated(APIView): | ||||
|         ).data | ||||
|  | ||||
|         response["agents"] = AgentHostnameSerializer( | ||||
|             policy.related_agents(), | ||||
|             policy.related_agents().only("pk", "hostname"), | ||||
|             many=True, | ||||
|         ).data | ||||
|  | ||||
|         return Response(response) | ||||
|  | ||||
|     # update agents, clients, sites to policy | ||||
|     def post(self, request): | ||||
|  | ||||
|         related_type = request.data["type"] | ||||
|         pk = request.data["pk"] | ||||
|  | ||||
|         # workstation policy is set | ||||
|         if ( | ||||
|             "workstation_policy" in request.data | ||||
|             and request.data["workstation_policy"] != 0 | ||||
|         ): | ||||
|             policy = get_object_or_404(Policy, pk=request.data["workstation_policy"]) | ||||
|  | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check and see if workstation policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not client.workstation_policy | ||||
|                     or client.workstation_policy | ||||
|                     and client.workstation_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     client.workstation_policy = policy | ||||
|                     client.save() | ||||
|  | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="workstation", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|  | ||||
|                 # Check and see if workstation policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not site.workstation_policy | ||||
|                     or site.workstation_policy | ||||
|                     and site.workstation_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     site.workstation_policy = policy | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.id}, | ||||
|                         mon_type="workstation", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # server policy is set | ||||
|         if "server_policy" in request.data and request.data["server_policy"] != 0: | ||||
|             policy = get_object_or_404(Policy, pk=request.data["server_policy"]) | ||||
|  | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check and see if server policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not client.server_policy | ||||
|                     or client.server_policy | ||||
|                     and client.server_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     client.server_policy = policy | ||||
|                     client.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="server", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|  | ||||
|                 # Check and see if server policy changed and regenerate policies | ||||
|                 if ( | ||||
|                     not site.server_policy | ||||
|                     or site.server_policy | ||||
|                     and site.server_policy.pk != policy.pk | ||||
|                 ): | ||||
|                     site.server_policy = policy | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.id}, | ||||
|                         mon_type="server", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # If workstation policy was cleared | ||||
|         if ( | ||||
|             "workstation_policy" in request.data | ||||
|             and request.data["workstation_policy"] == 0 | ||||
|         ): | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check if workstation policy is set and update it to None | ||||
|                 if client.workstation_policy: | ||||
|  | ||||
|                     client.workstation_policy = None | ||||
|                     client.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="workstation", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|  | ||||
|                 # Check if workstation policy is set and update it to None | ||||
|                 if site.workstation_policy: | ||||
|  | ||||
|                     site.workstation_policy = None | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.id}, | ||||
|                         mon_type="workstation", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # server policy cleared | ||||
|         if "server_policy" in request.data and request.data["server_policy"] == 0: | ||||
|  | ||||
|             if related_type == "client": | ||||
|                 client = get_object_or_404(Client, pk=pk) | ||||
|  | ||||
|                 # Check if server policy is set and update it to None | ||||
|                 if client.server_policy: | ||||
|  | ||||
|                     client.server_policy = None | ||||
|                     client.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site__client_id": client.id}, | ||||
|                         mon_type="server", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|             if related_type == "site": | ||||
|                 site = get_object_or_404(Site, pk=pk) | ||||
|                 # Check if server policy is set and update it to None | ||||
|                 if site.server_policy: | ||||
|  | ||||
|                     site.server_policy = None | ||||
|                     site.save() | ||||
|                     generate_agent_checks_by_location_task.delay( | ||||
|                         location={"site_id": site.pk}, | ||||
|                         mon_type="server", | ||||
|                         create_tasks=True, | ||||
|                     ) | ||||
|  | ||||
|         # agent policies | ||||
|         if related_type == "agent": | ||||
|             agent = get_object_or_404(Agent, pk=pk) | ||||
|  | ||||
|             if "policy" in request.data and request.data["policy"] != 0: | ||||
|                 policy = Policy.objects.get(pk=request.data["policy"]) | ||||
|  | ||||
|                 # Check and see if policy changed and regenerate policies | ||||
|                 if not agent.policy or agent.policy and agent.policy.pk != policy.pk: | ||||
|                     agent.policy = policy | ||||
|                     agent.save() | ||||
|                     agent.generate_checks_from_policies() | ||||
|                     agent.generate_tasks_from_policies() | ||||
|             else: | ||||
|                 if agent.policy: | ||||
|                     agent.policy = None | ||||
|                     agent.save() | ||||
|                     agent.generate_checks_from_policies() | ||||
|                     agent.generate_tasks_from_policies() | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     # view to get policies set on client, site, and workstation | ||||
|     def patch(self, request): | ||||
|         related_type = request.data["type"] | ||||
|  | ||||
|         # client, site, or agent pk | ||||
|         pk = request.data["pk"] | ||||
|  | ||||
|         if related_type == "agent": | ||||
|             agent = Agent.objects.get(pk=pk) | ||||
|             return Response(RelatedAgentPolicySerializer(agent).data) | ||||
|  | ||||
|         if related_type == "site": | ||||
|             site = Site.objects.get(pk=pk) | ||||
|             return Response(RelatedSitePolicySerializer(site).data) | ||||
|  | ||||
|         if related_type == "client": | ||||
|             client = Client.objects.get(pk=pk) | ||||
|             return Response(RelatedClientPolicySerializer(client).data) | ||||
|  | ||||
|         content = {"error": "Data was submitted incorrectly"} | ||||
|         return Response(content, status=status.HTTP_400_BAD_REQUEST) | ||||
|  | ||||
|  | ||||
| class UpdatePatchPolicy(APIView): | ||||
|  | ||||
| @@ -391,7 +171,7 @@ class UpdatePatchPolicy(APIView): | ||||
|  | ||||
|         serializer = WinUpdatePolicySerializer(data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.policy = policy | ||||
|         serializer.policy = policy  # type: ignore | ||||
|         serializer.save() | ||||
|  | ||||
|         return Response("ok") | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from agents.models import Agent | ||||
| from autotasks.tasks import remove_orphaned_win_tasks | ||||
|  | ||||
| @@ -7,7 +8,7 @@ class Command(BaseCommand): | ||||
|     help = "Checks for orphaned tasks on all agents and removes them" | ||||
|  | ||||
|     def handle(self, *args, **kwargs): | ||||
|         agents = Agent.objects.only("pk", "last_seen", "overdue_time") | ||||
|         agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time") | ||||
|         online = [i for i in agents if i.status == "online"] | ||||
|         for agent in online: | ||||
|             remove_orphaned_win_tasks.delay(agent.pk) | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| # Generated by Django 3.0.6 on 2020-05-31 01:23 | ||||
|  | ||||
| import django.contrib.postgres.fields | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| from django.db import migrations | ||||
|  | ||||
| from tacticalrmm.utils import get_bit_days | ||||
|  | ||||
| DAYS_OF_WEEK = { | ||||
|   | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-27 22:21 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0010_migrate_days_to_bitdays'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='None', max_length=30, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,33 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-28 04:17 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0011_automatedtask_alert_severity'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='email_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='text_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='text_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 03:07 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0012_auto_20210128_0417'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='automatedtask', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 21:11 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0013_auto_20210129_0307'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='dashboard_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,23 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 17:28 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0014_automatedtask_dashboard_alert'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_text_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 21:17 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0015_auto_20210205_1728'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='status', | ||||
|             field=models.CharField(choices=[('passing', 'Passing'), ('failing', 'Failing'), ('pending', 'Pending')], default='pending', max_length=30), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,29 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-10 15:12 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0016_automatedtask_status'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='resolved_text_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='automatedtask', | ||||
|             name='text_sent', | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.7 on 2021-02-24 05:37 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('autotasks', '0017_auto_20210210_1512'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='automatedtask', | ||||
|             name='run_asap_after_missed', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,14 +1,21 @@ | ||||
| import pytz | ||||
| import datetime as dt | ||||
| import random | ||||
| import string | ||||
| import datetime as dt | ||||
|  | ||||
| from django.db import models | ||||
| import pytz | ||||
| from django.conf import settings | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.db import models | ||||
| from django.db.models.fields import DateTimeField | ||||
| from django.utils import timezone as djangotime | ||||
| from loguru import logger | ||||
|  | ||||
| from alerts.models import SEVERITY_CHOICES | ||||
| from logs.models import BaseAuditModel | ||||
| from tacticalrmm.utils import bitdays_to_string | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| RUN_TIME_DAY_CHOICES = [ | ||||
|     (0, "Monday"), | ||||
|     (1, "Tuesday"), | ||||
| @@ -32,6 +39,12 @@ SYNC_STATUS_CHOICES = [ | ||||
|     ("pendingdeletion", "Pending Deletion on Agent"), | ||||
| ] | ||||
|  | ||||
| TASK_STATUS_CHOICES = [ | ||||
|     ("passing", "Passing"), | ||||
|     ("failing", "Failing"), | ||||
|     ("pending", "Pending"), | ||||
| ] | ||||
|  | ||||
|  | ||||
| class AutomatedTask(BaseAuditModel): | ||||
|     agent = models.ForeignKey( | ||||
| @@ -83,6 +96,7 @@ class AutomatedTask(BaseAuditModel): | ||||
|     ) | ||||
|     run_time_date = DateTimeField(null=True, blank=True) | ||||
|     remove_if_not_scheduled = models.BooleanField(default=False) | ||||
|     run_asap_after_missed = models.BooleanField(default=False)  # added in agent v1.4.7 | ||||
|     managed_by_policy = models.BooleanField(default=False) | ||||
|     parent_task = models.PositiveIntegerField(null=True, blank=True) | ||||
|     win_task_name = models.CharField(max_length=255, null=True, blank=True) | ||||
| @@ -93,9 +107,18 @@ class AutomatedTask(BaseAuditModel): | ||||
|     execution_time = models.CharField(max_length=100, default="0.0000") | ||||
|     last_run = models.DateTimeField(null=True, blank=True) | ||||
|     enabled = models.BooleanField(default=True) | ||||
|     status = models.CharField( | ||||
|         max_length=30, choices=TASK_STATUS_CHOICES, default="pending" | ||||
|     ) | ||||
|     sync_status = models.CharField( | ||||
|         max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced" | ||||
|     ) | ||||
|     alert_severity = models.CharField( | ||||
|         max_length=30, choices=SEVERITY_CHOICES, default="info" | ||||
|     ) | ||||
|     email_alert = models.BooleanField(default=False) | ||||
|     text_alert = models.BooleanField(default=False) | ||||
|     dashboard_alert = models.BooleanField(default=False) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.name | ||||
| @@ -140,22 +163,49 @@ class AutomatedTask(BaseAuditModel): | ||||
|     def create_policy_task(self, agent=None, policy=None): | ||||
|         from .tasks import create_win_task_schedule | ||||
|  | ||||
|         # if policy is present, then this task is being copied to another policy | ||||
|         # if agent is present, then this task is being created on an agent from a policy | ||||
|         # exit if neither are set or if both are set | ||||
|         if not agent and not policy or agent and policy: | ||||
|             return | ||||
|  | ||||
|         assigned_check = None | ||||
|  | ||||
|         # get the correct assigned check for this task, if one is set | ||||
|         if agent and self.assigned_check: | ||||
|             assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.pk) | ||||
|             # check if there is a matching check on the agent | ||||
|             if agent.agentchecks.filter(parent_check=self.assigned_check.pk).exists(): | ||||
|                 assigned_check = agent.agentchecks.filter( | ||||
|                     parent_check=self.assigned_check.pk | ||||
|                 ).first() | ||||
|             # the policy check was overridden by one of the agent's own checks, so use that check instead | ||||
|             else: | ||||
|                 if agent.agentchecks.filter( | ||||
|                     check_type=self.assigned_check.check_type, overriden_by_policy=True | ||||
|                 ).exists(): | ||||
|                     assigned_check = agent.agentchecks.filter( | ||||
|                         check_type=self.assigned_check.check_type, | ||||
|                         overriden_by_policy=True, | ||||
|                     ).first() | ||||
|         elif policy and self.assigned_check: | ||||
|             assigned_check = policy.policychecks.get(name=self.assigned_check.name) | ||||
|             if policy.policychecks.filter(name=self.assigned_check.name).exists(): | ||||
|                 assigned_check = policy.policychecks.filter( | ||||
|                     name=self.assigned_check.name | ||||
|                 ).first() | ||||
|             else: | ||||
|                 assigned_check = policy.policychecks.filter( | ||||
|                     check_type=self.assigned_check.check_type | ||||
|                 ).first() | ||||
|  | ||||
|         task = AutomatedTask.objects.create( | ||||
|             agent=agent, | ||||
|             policy=policy, | ||||
|             managed_by_policy=bool(agent), | ||||
|             parent_task=(self.pk if agent else None), | ||||
|             alert_severity=self.alert_severity, | ||||
|             email_alert=self.email_alert, | ||||
|             text_alert=self.text_alert, | ||||
|             dashboard_alert=self.dashboard_alert, | ||||
|             script=self.script, | ||||
|             script_args=self.script_args, | ||||
|             assigned_check=assigned_check, | ||||
| @@ -169,6 +219,85 @@ class AutomatedTask(BaseAuditModel): | ||||
|             timeout=self.timeout, | ||||
|             enabled=self.enabled, | ||||
|             remove_if_not_scheduled=self.remove_if_not_scheduled, | ||||
|             run_asap_after_missed=self.run_asap_after_missed, | ||||
|         ) | ||||
|  | ||||
|         create_win_task_schedule.delay(task.pk) | ||||
|  | ||||
|     def should_create_alert(self, alert_template): | ||||
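|         # alert if any per-task notification (dashboard, email, text) is enabled, or the agent's alert template forces task alerts | ||||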
|         return ( | ||||
|             self.dashboard_alert | ||||
|             or self.email_alert | ||||
|             or self.text_alert | ||||
|             or ( | ||||
|                 alert_template | ||||
|                 and ( | ||||
|                     alert_template.task_always_alert | ||||
|                     or alert_template.task_always_email | ||||
|                     or alert_template.task_always_text | ||||
|                 ) | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     def send_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
|         else: | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_mail(subject, body, alert_template) | ||||
|  | ||||
|     def send_sms(self): | ||||
|  | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
|         else: | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_sms(body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_email(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|  | ||||
|         CORE.send_mail(subject, body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_sms(self): | ||||
|         from core.models import CoreSettings | ||||
|  | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = ( | ||||
|             subject | ||||
|             + f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}" | ||||
|         ) | ||||
|         CORE.send_sms(body, alert_template=alert_template) | ||||
|   | ||||
| @@ -1,12 +1,11 @@ | ||||
| import pytz | ||||
| from rest_framework import serializers | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from agents.models import Agent | ||||
| from scripts.models import Script | ||||
|  | ||||
| from scripts.serializers import ScriptCheckSerializer | ||||
| from checks.serializers import CheckSerializer | ||||
| from scripts.models import Script | ||||
| from scripts.serializers import ScriptCheckSerializer | ||||
|  | ||||
| from .models import AutomatedTask | ||||
|  | ||||
|  | ||||
| class TaskSerializer(serializers.ModelSerializer): | ||||
| @@ -14,6 +13,24 @@ class TaskSerializer(serializers.ModelSerializer): | ||||
|     assigned_check = CheckSerializer(read_only=True) | ||||
|     schedule = serializers.ReadOnlyField() | ||||
|     last_run = serializers.ReadOnlyField(source="last_run_as_timezone") | ||||
|     alert_template = serializers.SerializerMethodField() | ||||
|  | ||||
|     def get_alert_template(self, obj): | ||||
|  | ||||
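|         # tasks not tied to an agent (e.g. policy tasks) have no alert template to report | ||||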
|         if obj.agent: | ||||
|             alert_template = obj.agent.get_alert_template() | ||||
|         else: | ||||
|             alert_template = None | ||||
|  | ||||
|         if not alert_template: | ||||
|             return None | ||||
|         else: | ||||
|             return { | ||||
|                 "name": alert_template.name, | ||||
|                 "always_email": alert_template.task_always_email, | ||||
|                 "always_text": alert_template.task_always_text, | ||||
|                 "always_alert": alert_template.task_always_alert, | ||||
|             } | ||||
|  | ||||
|     class Meta: | ||||
|         model = AutomatedTask | ||||
|   | ||||
| @@ -1,14 +1,19 @@ | ||||
| import asyncio | ||||
| import datetime as dt | ||||
| from loguru import logger | ||||
| from tacticalrmm.celery import app | ||||
| from django.conf import settings | ||||
| import random | ||||
| from time import sleep | ||||
| from typing import Union | ||||
|  | ||||
| import pytz | ||||
| from django.conf import settings | ||||
| from django.utils import timezone as djangotime | ||||
| from loguru import logger | ||||
| from packaging import version as pyver | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from logs.models import PendingAction | ||||
| from tacticalrmm.celery import app | ||||
|  | ||||
| from .models import AutomatedTask | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| @@ -40,7 +45,7 @@ def create_win_task_schedule(pk, pending_action=False): | ||||
|             task.run_time_date = now.astimezone(agent_tz).replace( | ||||
|                 tzinfo=pytz.utc | ||||
|             ) + djangotime.timedelta(minutes=5) | ||||
|             task.save() | ||||
|             task.save(update_fields=["run_time_date"]) | ||||
|  | ||||
|         nats_data = { | ||||
|             "func": "schedtask", | ||||
| @@ -57,9 +62,12 @@ def create_win_task_schedule(pk, pending_action=False): | ||||
|             }, | ||||
|         } | ||||
|  | ||||
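|         # run_asap_after_missed is only supported by agent v1.4.7 and later, so only send the flag to agents on that version or newer | ||||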
|         if task.remove_if_not_scheduled and pyver.parse( | ||||
|         if task.run_asap_after_missed and pyver.parse( | ||||
|             task.agent.version | ||||
|         ) >= pyver.parse("1.1.2"): | ||||
|         ) >= pyver.parse("1.4.7"): | ||||
|             nats_data["schedtaskpayload"]["run_asap_after_missed"] = True | ||||
|  | ||||
|         if task.remove_if_not_scheduled: | ||||
|             nats_data["schedtaskpayload"]["deleteafter"] = True | ||||
|  | ||||
|     elif task.task_type == "checkfailure" or task.task_type == "manual": | ||||
| @@ -243,3 +251,85 @@ def remove_orphaned_win_tasks(agentpk): | ||||
|                 logger.info(f"Removed orphaned task {task} from {agent.hostname}") | ||||
|  | ||||
|     logger.info(f"Orphaned task cleanup finished on {agent.hostname}") | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_task_email_alert(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_task.send_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send an email only if the last email sent is older than alert interval | ||||
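|             # e.g. with alert_interval=1.0, at most one reminder email per day is sent for this alert | ||||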
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 alert.assigned_task.send_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_task_sms_alert(pk: int, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_task.send_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send a text only if the last text sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 sleep(random.randint(1, 3)) | ||||
|                 alert.assigned_task.send_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_task_sms_alert(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.resolved_sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_task.send_resolved_sms() | ||||
|         alert.resolved_sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_task_email_alert(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.resolved_email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_task.send_resolved_email() | ||||
|         alert.resolved_email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|   | ||||
| @@ -1,14 +1,15 @@ | ||||
| import datetime as dt | ||||
| from unittest.mock import patch, call | ||||
| from model_bakery import baker | ||||
| from django.utils import timezone as djangotime | ||||
| from unittest.mock import call, patch | ||||
|  | ||||
| from django.utils import timezone as djangotime | ||||
| from model_bakery import baker | ||||
|  | ||||
| from logs.models import PendingAction | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from logs.models import PendingAction | ||||
| from .serializers import AutoTaskSerializer | ||||
| from .tasks import remove_orphaned_win_tasks, run_win_task, create_win_task_schedule | ||||
| from .tasks import create_win_task_schedule, remove_orphaned_win_tasks, run_win_task | ||||
|  | ||||
|  | ||||
| class TestAutotaskViews(TacticalTestCase): | ||||
| @@ -150,7 +151,9 @@ class TestAutotaskViews(TacticalTestCase): | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         update_policy_task_fields_task.assert_called_with(policy_task.id, True) | ||||
|         update_policy_task_fields_task.assert_called_with( | ||||
|             policy_task.id, update_agent=True | ||||
|         ) | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| from django.urls import path | ||||
|  | ||||
| from . import views | ||||
|  | ||||
| urlpatterns = [ | ||||
|   | ||||
| @@ -1,32 +1,28 @@ | ||||
| import asyncio | ||||
| import pytz | ||||
|  | ||||
| from django.shortcuts import get_object_or_404 | ||||
|  | ||||
| from rest_framework.views import APIView | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.decorators import api_view | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from agents.models import Agent | ||||
| from checks.models import Check | ||||
|  | ||||
| from scripts.models import Script | ||||
| from core.models import CoreSettings | ||||
|  | ||||
| from .serializers import TaskSerializer, AutoTaskSerializer | ||||
| from tacticalrmm.utils import get_bit_days, get_default_timezone, notify_error | ||||
|  | ||||
| from .models import AutomatedTask | ||||
| from .serializers import AutoTaskSerializer, TaskSerializer | ||||
| from .tasks import ( | ||||
|     create_win_task_schedule, | ||||
|     delete_win_task_schedule, | ||||
|     enable_or_disable_win_task, | ||||
| ) | ||||
| from tacticalrmm.utils import notify_error, get_bit_days | ||||
|  | ||||
|  | ||||
| class AddAutoTask(APIView): | ||||
|     def post(self, request): | ||||
|         from automation.tasks import generate_agent_tasks_from_policies_task | ||||
|         from automation.models import Policy | ||||
|         from automation.tasks import generate_agent_tasks_from_policies_task | ||||
|  | ||||
|         data = request.data | ||||
|         script = get_object_or_404(Script, pk=data["autotask"]["script"]) | ||||
| @@ -76,11 +72,25 @@ class AutoTask(APIView): | ||||
|  | ||||
|         agent = get_object_or_404(Agent, pk=pk) | ||||
|         ctx = { | ||||
|             "default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone), | ||||
|             "default_tz": get_default_timezone(), | ||||
|             "agent_tz": agent.time_zone, | ||||
|         } | ||||
|         return Response(AutoTaskSerializer(agent, context=ctx).data) | ||||
|  | ||||
|     def put(self, request, pk): | ||||
|         from automation.tasks import update_policy_task_fields_task | ||||
|  | ||||
|         task = get_object_or_404(AutomatedTask, pk=pk) | ||||
|  | ||||
|         serializer = TaskSerializer(instance=task, data=request.data, partial=True) | ||||
|         serializer.is_valid(raise_exception=True) | ||||
|         serializer.save() | ||||
|  | ||||
|         if task.policy: | ||||
|             update_policy_task_fields_task.delay(task.pk) | ||||
|  | ||||
|         return Response("ok") | ||||
|  | ||||
|     def patch(self, request, pk): | ||||
|         from automation.tasks import update_policy_task_fields_task | ||||
|  | ||||
| @@ -93,7 +103,7 @@ class AutoTask(APIView): | ||||
|                 enable_or_disable_win_task.delay(pk=task.pk, action=action) | ||||
|  | ||||
|             else: | ||||
|                 update_policy_task_fields_task.delay(task.pk, action) | ||||
|                 update_policy_task_fields_task.delay(task.pk, update_agent=True) | ||||
|  | ||||
|             task.enabled = action | ||||
|             task.save(update_fields=["enabled"]) | ||||
|   | ||||
| @@ -1,21 +1,27 @@ | ||||
| from .models import Check | ||||
| from model_bakery.recipe import Recipe, seq | ||||
| from model_bakery.recipe import Recipe | ||||
|  | ||||
| check = Recipe(Check) | ||||
| check = Recipe("checks.Check") | ||||
|  | ||||
| diskspace_check = check.extend(check_type="diskspace", disk="C:", threshold=75) | ||||
| diskspace_check = check.extend( | ||||
|     check_type="diskspace", disk="C:", warning_threshold=30, error_threshold=10 | ||||
| ) | ||||
|  | ||||
| cpuload_check = check.extend(check_type="cpuload", threshold=75) | ||||
| cpuload_check = check.extend( | ||||
|     check_type="cpuload", warning_threshold=30, error_threshold=75 | ||||
| ) | ||||
|  | ||||
| ping_check = check.extend(check_type="ping", ip="10.10.10.10") | ||||
|  | ||||
| memory_check = check.extend(check_type="memory", threshold=75) | ||||
| memory_check = check.extend( | ||||
|     check_type="memory", warning_threshold=60, error_threshold=75 | ||||
| ) | ||||
|  | ||||
| winsvc_check = check.extend( | ||||
|     check_type="winsvc", | ||||
|     svc_name="ServiceName", | ||||
|     svc_display_name="ServiceName", | ||||
|     svc_policy_mode="manual", | ||||
|     pass_if_svc_not_exist=False, | ||||
| ) | ||||
|  | ||||
| eventlog_check = check.extend( | ||||
|   | ||||
| @@ -3,8 +3,8 @@ | ||||
| import django.contrib.postgres.fields | ||||
| import django.contrib.postgres.fields.jsonb | ||||
| import django.core.validators | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-09 21:36 | ||||
|  | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|   | ||||
api/tacticalrmm/checks/migrations/0016_auto_20210123_0149.py (new file, 43 lines)
							| @@ -0,0 +1,43 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-23 01:49 | ||||
|  | ||||
| import django.contrib.postgres.fields | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0015_auto_20210110_1808'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='threshold', | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='error_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='info_return_codes', | ||||
|             field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='warning_return_codes', | ||||
|             field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveIntegerField(), blank=True, default=list, null=True, size=None), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='warning_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, default=0, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-01-29 21:11 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0016_auto_20210123_0149'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='dashboard_alert', | ||||
|             field=models.BooleanField(default=False), | ||||
|         ), | ||||
|     ] | ||||
api/tacticalrmm/checks/migrations/0018_auto_20210205_1647.py (new file, 18 lines)
							| @@ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 16:47 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0017_check_dashboard_alert'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='check', | ||||
|             name='alert_severity', | ||||
|             field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='warning', max_length=15, null=True), | ||||
|         ), | ||||
|     ] | ||||
api/tacticalrmm/checks/migrations/0019_auto_20210205_1728.py (new file, 23 lines)
							| @@ -0,0 +1,23 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-05 17:28 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0018_auto_20210205_1647'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='resolved_email_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name='check', | ||||
|             name='resolved_text_sent', | ||||
|             field=models.DateTimeField(blank=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
api/tacticalrmm/checks/migrations/0020_auto_20210210_1512.py (new file, 29 lines)
							| @@ -0,0 +1,29 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-10 15:12 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0019_auto_20210205_1728'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='resolved_email_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='resolved_text_sent', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name='check', | ||||
|             name='text_sent', | ||||
|         ), | ||||
|     ] | ||||
api/tacticalrmm/checks/migrations/0021_auto_20210212_1429.py (new file, 24 lines)
							| @@ -0,0 +1,24 @@ | ||||
| # Generated by Django 3.1.4 on 2021-02-12 14:29 | ||||
|  | ||||
| import django.core.validators | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ('checks', '0020_auto_20210210_1512'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name='check', | ||||
|             name='error_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(99)]), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name='check', | ||||
|             name='warning_threshold', | ||||
|             field=models.PositiveIntegerField(blank=True, default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(99)]), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,21 +1,25 @@ | ||||
| import asyncio | ||||
| import string | ||||
| import os | ||||
| import json | ||||
| import pytz | ||||
| from statistics import mean, mode | ||||
| import os | ||||
| import string | ||||
| from statistics import mean | ||||
| from typing import Any | ||||
|  | ||||
| from django.db import models | ||||
| import pytz | ||||
| from django.conf import settings | ||||
| from django.contrib.postgres.fields import ArrayField | ||||
| from django.core.validators import MinValueValidator, MaxValueValidator | ||||
| from rest_framework.fields import JSONField | ||||
| from django.core.validators import MaxValueValidator, MinValueValidator | ||||
| from django.db import models | ||||
| from loguru import logger | ||||
|  | ||||
| from alerts.models import SEVERITY_CHOICES | ||||
| from core.models import CoreSettings | ||||
| from logs.models import BaseAuditModel | ||||
| from .tasks import handle_check_email_alert_task, handle_check_sms_alert_task | ||||
|  | ||||
| from .utils import bytes2human | ||||
|  | ||||
| logger.configure(**settings.LOG_CONFIG) | ||||
|  | ||||
| CHECK_TYPE_CHOICES = [ | ||||
|     ("diskspace", "Disk Space Check"), | ||||
|     ("ping", "Ping Check"), | ||||
| @@ -84,18 +88,34 @@ class Check(BaseAuditModel): | ||||
|     last_run = models.DateTimeField(null=True, blank=True) | ||||
|     email_alert = models.BooleanField(default=False) | ||||
|     text_alert = models.BooleanField(default=False) | ||||
|     dashboard_alert = models.BooleanField(default=False) | ||||
|     fails_b4_alert = models.PositiveIntegerField(default=1) | ||||
|     fail_count = models.PositiveIntegerField(default=0) | ||||
|     email_sent = models.DateTimeField(null=True, blank=True) | ||||
|     text_sent = models.DateTimeField(null=True, blank=True) | ||||
|     outage_history = models.JSONField(null=True, blank=True)  # store | ||||
|     extra_details = models.JSONField(null=True, blank=True) | ||||
|  | ||||
|     # check specific fields | ||||
|  | ||||
|     # for eventlog, script, ip, and service alert severity | ||||
|     alert_severity = models.CharField( | ||||
|         max_length=15, | ||||
|         choices=SEVERITY_CHOICES, | ||||
|         default="warning", | ||||
|         null=True, | ||||
|         blank=True, | ||||
|     ) | ||||
|  | ||||
|     # threshold percent for diskspace, cpuload or memory check | ||||
|     threshold = models.PositiveIntegerField( | ||||
|         null=True, blank=True, validators=[MinValueValidator(1), MaxValueValidator(99)] | ||||
|     error_threshold = models.PositiveIntegerField( | ||||
|         validators=[MinValueValidator(0), MaxValueValidator(99)], | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=0, | ||||
|     ) | ||||
|     warning_threshold = models.PositiveIntegerField( | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         validators=[MinValueValidator(0), MaxValueValidator(99)], | ||||
|         default=0, | ||||
|     ) | ||||
|     # diskcheck i.e C:, D: etc | ||||
|     disk = models.CharField(max_length=2, null=True, blank=True) | ||||
| @@ -115,6 +135,18 @@ class Check(BaseAuditModel): | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     info_return_codes = ArrayField( | ||||
|         models.PositiveIntegerField(), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     warning_return_codes = ArrayField( | ||||
|         models.PositiveIntegerField(), | ||||
|         null=True, | ||||
|         blank=True, | ||||
|         default=list, | ||||
|     ) | ||||
|     timeout = models.PositiveIntegerField(null=True, blank=True) | ||||
|     stdout = models.TextField(null=True, blank=True) | ||||
|     stderr = models.TextField(null=True, blank=True) | ||||
| @@ -159,17 +191,31 @@ class Check(BaseAuditModel): | ||||
|     @property | ||||
|     def readable_desc(self): | ||||
|         if self.check_type == "diskspace": | ||||
|             return f"{self.get_check_type_display()}: Drive {self.disk} < {self.threshold}%" | ||||
|  | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             return f"{self.get_check_type_display()}: Drive {self.disk} - {text}"  # type: ignore | ||||
|         elif self.check_type == "ping": | ||||
|             return f"{self.get_check_type_display()}: {self.name}" | ||||
|             return f"{self.get_check_type_display()}: {self.name}"  # type: ignore | ||||
|         elif self.check_type == "cpuload" or self.check_type == "memory": | ||||
|             return f"{self.get_check_type_display()} > {self.threshold}%" | ||||
|  | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             return f"{self.get_check_type_display()} - {text}"  # type: ignore | ||||
|         elif self.check_type == "winsvc": | ||||
|             return f"{self.get_check_type_display()}: {self.svc_display_name}" | ||||
|             return f"{self.get_check_type_display()}: {self.svc_display_name}"  # type: ignore | ||||
|         elif self.check_type == "eventlog": | ||||
|             return f"{self.get_check_type_display()}: {self.name}" | ||||
|             return f"{self.get_check_type_display()}: {self.name}"  # type: ignore | ||||
|         elif self.check_type == "script": | ||||
|             return f"{self.get_check_type_display()}: {self.script.name}" | ||||
|             return f"{self.get_check_type_display()}: {self.script.name}"  # type: ignore | ||||
|         else: | ||||
|             return "n/a" | ||||
|  | ||||
| @@ -188,15 +234,13 @@ class Check(BaseAuditModel): | ||||
|         return self.last_run | ||||
|  | ||||
|     @property | ||||
|     def non_editable_fields(self): | ||||
|     def non_editable_fields(self) -> list[str]: | ||||
|         return [ | ||||
|             "check_type", | ||||
|             "status", | ||||
|             "more_info", | ||||
|             "last_run", | ||||
|             "fail_count", | ||||
|             "email_sent", | ||||
|             "text_sent", | ||||
|             "outage_history", | ||||
|             "extra_details", | ||||
|             "stdout", | ||||
| @@ -215,10 +259,28 @@ class Check(BaseAuditModel): | ||||
|             "modified_time", | ||||
|         ] | ||||
|  | ||||
|     def add_check_history(self, value, more_info=None): | ||||
|     def should_create_alert(self, alert_template): | ||||
|  | ||||
|         return ( | ||||
|             self.dashboard_alert | ||||
|             or self.email_alert | ||||
|             or self.text_alert | ||||
|             or ( | ||||
|                 alert_template | ||||
|                 and ( | ||||
|                     alert_template.check_always_alert | ||||
|                     or alert_template.check_always_email | ||||
|                     or alert_template.check_always_text | ||||
|                 ) | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     def add_check_history(self, value: int, more_info: Any = None) -> None: | ||||
|         CheckHistory.objects.create(check_history=self, y=value, results=more_info) | ||||
|  | ||||
|     def handle_checkv2(self, data): | ||||
|         from alerts.models import Alert | ||||
|  | ||||
|         # cpuload or mem checks | ||||
|         if self.check_type == "cpuload" or self.check_type == "memory": | ||||
|  | ||||
| @@ -231,8 +293,12 @@ class Check(BaseAuditModel): | ||||
|  | ||||
|             avg = int(mean(self.history)) | ||||
|  | ||||
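|             # e.g. with warning_threshold=30 and error_threshold=75, a 50% average fails as "warning" and an 80% average fails as "error" | ||||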
|             if avg > self.threshold: | ||||
|             if self.error_threshold and avg > self.error_threshold: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "error" | ||||
|             elif self.warning_threshold and avg > self.warning_threshold: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "warning" | ||||
|             else: | ||||
|                 self.status = "passing" | ||||
|  | ||||
| @@ -246,17 +312,26 @@ class Check(BaseAuditModel): | ||||
|                 total = bytes2human(data["total"]) | ||||
|                 free = bytes2human(data["free"]) | ||||
|  | ||||
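|                 # thresholds apply to percent free space, e.g. error_threshold=10 fails as "error" once free space drops below 10% | ||||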
|                 if (100 - percent_used) < self.threshold: | ||||
|                 if self.error_threshold and (100 - percent_used) < self.error_threshold: | ||||
|                     self.status = "failing" | ||||
|                     self.alert_severity = "error" | ||||
|                 elif ( | ||||
|                     self.warning_threshold | ||||
|                     and (100 - percent_used) < self.warning_threshold | ||||
|                 ): | ||||
|                     self.status = "failing" | ||||
|                     self.alert_severity = "warning" | ||||
|  | ||||
|                 else: | ||||
|                     self.status = "passing" | ||||
|  | ||||
|                 self.more_info = f"Total: {total}B, Free: {free}B" | ||||
|  | ||||
|                 # add check history | ||||
|                 self.add_check_history(percent_used) | ||||
|                 self.add_check_history(100 - percent_used) | ||||
|             else: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "error" | ||||
|                 self.more_info = f"Disk {self.disk} does not exist" | ||||
|  | ||||
|             self.save(update_fields=["more_info"]) | ||||
| @@ -273,8 +348,15 @@ class Check(BaseAuditModel): | ||||
|                 # golang agent | ||||
|                 self.execution_time = "{:.4f}".format(data["runtime"]) | ||||
|  | ||||
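|             # e.g. with info_return_codes=[2] and warning_return_codes=[3], retcode 2 fails as "info", 3 as "warning", any other non-zero as "error" | ||||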
|             if data["retcode"] != 0: | ||||
|             if data["retcode"] in self.info_return_codes: | ||||
|                 self.alert_severity = "info" | ||||
|                 self.status = "failing" | ||||
|             elif data["retcode"] in self.warning_return_codes: | ||||
|                 self.alert_severity = "warning" | ||||
|                 self.status = "failing" | ||||
|             elif data["retcode"] != 0: | ||||
|                 self.status = "failing" | ||||
|                 self.alert_severity = "error" | ||||
|             else: | ||||
|                 self.status = "passing" | ||||
|  | ||||
| @@ -428,20 +510,16 @@ class Check(BaseAuditModel): | ||||
|         # handle status | ||||
|         if self.status == "failing": | ||||
|             self.fail_count += 1 | ||||
|             self.save(update_fields=["status", "fail_count"]) | ||||
|             self.save(update_fields=["status", "fail_count", "alert_severity"]) | ||||
|  | ||||
|             if self.fail_count >= self.fails_b4_alert: | ||||
|                 Alert.handle_alert_failure(self) | ||||
|  | ||||
|         elif self.status == "passing": | ||||
|             if self.fail_count != 0: | ||||
|                 self.fail_count = 0 | ||||
|                 self.save(update_fields=["status", "fail_count"]) | ||||
|             else: | ||||
|                 self.save(update_fields=["status"]) | ||||
|  | ||||
|         if self.fail_count >= self.fails_b4_alert: | ||||
|             if self.email_alert: | ||||
|                 handle_check_email_alert_task.delay(self.pk) | ||||
|             if self.text_alert: | ||||
|                 handle_check_sms_alert_task.delay(self.pk) | ||||
|             self.fail_count = 0 | ||||
|             self.save(update_fields=["status", "fail_count", "alert_severity"]) | ||||
|             if Alert.objects.filter(assigned_check=self, resolved=False).exists(): | ||||
|                 Alert.handle_alert_resolve(self) | ||||
|  | ||||
|         return self.status | ||||
|  | ||||
| @@ -478,17 +556,22 @@ class Check(BaseAuditModel): | ||||
|             managed_by_policy=bool(agent), | ||||
|             parent_check=(self.pk if agent else None), | ||||
|             name=self.name, | ||||
|             alert_severity=self.alert_severity, | ||||
|             check_type=self.check_type, | ||||
|             email_alert=self.email_alert, | ||||
|             dashboard_alert=self.dashboard_alert, | ||||
|             text_alert=self.text_alert, | ||||
|             fails_b4_alert=self.fails_b4_alert, | ||||
|             extra_details=self.extra_details, | ||||
|             threshold=self.threshold, | ||||
|             error_threshold=self.error_threshold, | ||||
|             warning_threshold=self.warning_threshold, | ||||
|             disk=self.disk, | ||||
|             ip=self.ip, | ||||
|             script=self.script, | ||||
|             script_args=self.script_args, | ||||
|             timeout=self.timeout, | ||||
|             info_return_codes=self.info_return_codes, | ||||
|             warning_return_codes=self.warning_return_codes, | ||||
|             svc_name=self.svc_name, | ||||
|             svc_display_name=self.svc_display_name, | ||||
|             pass_if_start_pending=self.pass_if_start_pending, | ||||
| @@ -530,19 +613,27 @@ class Check(BaseAuditModel): | ||||
|     def send_email(self): | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|  | ||||
|         body: str = "" | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
|         else: | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         if self.check_type == "diskspace": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             percent_used = [ | ||||
|                 d["percent"] for d in self.agent.disks if d["device"] == self.disk | ||||
|             ][0] | ||||
|             percent_free = 100 - percent_used | ||||
|  | ||||
|             body = subject + f" - Free: {percent_free}%, Threshold: {self.threshold}%" | ||||
|             body = subject + f" - Free: {percent_free}%, {text}" | ||||
|  | ||||
|         elif self.check_type == "script": | ||||
|  | ||||
| @@ -556,26 +647,29 @@ class Check(BaseAuditModel): | ||||
|             body = self.more_info | ||||
|  | ||||
|         elif self.check_type == "cpuload" or self.check_type == "memory": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             avg = int(mean(self.history)) | ||||
|  | ||||
|             if self.check_type == "cpuload": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average CPU utilization: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average CPU utilization: {avg}%, {text}" | ||||
|  | ||||
|             elif self.check_type == "memory": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average memory usage: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average memory usage: {avg}%, {text}" | ||||
|  | ||||
|         elif self.check_type == "winsvc": | ||||
|  | ||||
|             status = list( | ||||
|                 filter(lambda x: x["name"] == self.svc_name, self.agent.services) | ||||
|             )[0]["status"] | ||||
|             try: | ||||
|                 status = list( | ||||
|                     filter(lambda x: x["name"] == self.svc_name, self.agent.services) | ||||
|                 )[0]["status"] | ||||
|             # catch services that don't exist when this is a policy check | ||||
|             except: | ||||
|                 status = "Unknown" | ||||
|  | ||||
|             body = subject + f" - Status: {status.upper()}" | ||||
|  | ||||
| @@ -601,11 +695,13 @@ class Check(BaseAuditModel): | ||||
|                 except: | ||||
|                     continue | ||||
|  | ||||
|         CORE.send_mail(subject, body) | ||||
|         CORE.send_mail(subject, body, alert_template=alert_template) | ||||
|  | ||||
|     def send_sms(self): | ||||
|  | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         body: str = "" | ||||
|  | ||||
|         if self.agent: | ||||
|             subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed" | ||||
| @@ -613,27 +709,33 @@ class Check(BaseAuditModel): | ||||
|             subject = f"{self} Failed" | ||||
|  | ||||
|         if self.check_type == "diskspace": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             percent_used = [ | ||||
|                 d["percent"] for d in self.agent.disks if d["device"] == self.disk | ||||
|             ][0] | ||||
|             percent_free = 100 - percent_used | ||||
|             body = subject + f" - Free: {percent_free}%, Threshold: {self.threshold}%" | ||||
|             body = subject + f" - Free: {percent_free}%, {text}" | ||||
|         elif self.check_type == "script": | ||||
|             body = subject + f" - Return code: {self.retcode}" | ||||
|         elif self.check_type == "ping": | ||||
|             body = subject | ||||
|         elif self.check_type == "cpuload" or self.check_type == "memory": | ||||
|             text = "" | ||||
|             if self.warning_threshold: | ||||
|                 text += f" Warning Threshold: {self.warning_threshold}%" | ||||
|             if self.error_threshold: | ||||
|                 text += f" Error Threshold: {self.error_threshold}%" | ||||
|  | ||||
|             avg = int(mean(self.history)) | ||||
|             if self.check_type == "cpuload": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average CPU utilization: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average CPU utilization: {avg}%, {text}" | ||||
|             elif self.check_type == "memory": | ||||
|                 body = ( | ||||
|                     subject | ||||
|                     + f" - Average memory usage: {avg}%, Threshold: {self.threshold}%" | ||||
|                 ) | ||||
|                 body = subject + f" - Average memory usage: {avg}%, {text}" | ||||
|         elif self.check_type == "winsvc": | ||||
|             status = list( | ||||
|                 filter(lambda x: x["name"] == self.svc_name, self.agent.services) | ||||
| @@ -642,7 +744,21 @@ class Check(BaseAuditModel): | ||||
|         elif self.check_type == "eventlog": | ||||
|             body = subject | ||||
|  | ||||
|         CORE.send_sms(body) | ||||
|         CORE.send_sms(body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_email(self): | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         body = f"{self} is now back to normal" | ||||
|  | ||||
|         CORE.send_mail(subject, body, alert_template=alert_template) | ||||
|  | ||||
|     def send_resolved_sms(self): | ||||
|         CORE = CoreSettings.objects.first() | ||||
|         alert_template = self.agent.get_alert_template() | ||||
|         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved" | ||||
|         CORE.send_sms(subject, alert_template=alert_template) | ||||
|  | ||||
|  | ||||
| class CheckHistory(models.Model): | ||||
|   | ||||
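The send_email/send_sms changes above drop the single threshold field in favor of the warning_threshold / error_threshold pair and only mention a threshold in the message body when it is non-zero. A minimal standalone sketch of that body construction, assuming both fields are plain integers where 0 means "not set" (build_diskspace_body is an illustrative helper, not a function from this commit):

    def build_diskspace_body(subject: str, percent_free: int,
                             warning_threshold: int, error_threshold: int) -> str:
        # mirrors the inline logic in Check.send_email / Check.send_sms:
        # each threshold is appended to the message only when it is set
        text = ""
        if warning_threshold:
            text += f" Warning Threshold: {warning_threshold}%"
        if error_threshold:
            text += f" Error Threshold: {error_threshold}%"
        return subject + f" - Free: {percent_free}%, {text}"

    # build_diskspace_body("Disk C: Failed", 8, 25, 10) mentions both thresholds,
    # build_diskspace_body("Disk C: Failed", 8, 0, 10) mentions only the error one.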
| @@ -1,10 +1,11 @@ | ||||
| import validators as _v | ||||
| import pytz | ||||
| import validators as _v | ||||
| from rest_framework import serializers | ||||
|  | ||||
| from .models import Check, CheckHistory | ||||
| from autotasks.models import AutomatedTask | ||||
| from scripts.serializers import ScriptSerializer, ScriptCheckSerializer | ||||
| from scripts.serializers import ScriptCheckSerializer, ScriptSerializer | ||||
|  | ||||
| from .models import Check, CheckHistory | ||||
|  | ||||
|  | ||||
| class AssignedTaskField(serializers.ModelSerializer): | ||||
| @@ -20,6 +21,23 @@ class CheckSerializer(serializers.ModelSerializer): | ||||
|     assigned_task = serializers.SerializerMethodField() | ||||
|     last_run = serializers.ReadOnlyField(source="last_run_as_timezone") | ||||
|     history_info = serializers.ReadOnlyField() | ||||
|     alert_template = serializers.SerializerMethodField() | ||||
|  | ||||
|     def get_alert_template(self, obj): | ||||
|         if obj.agent: | ||||
|             alert_template = obj.agent.get_alert_template() | ||||
|         else: | ||||
|             alert_template = None | ||||
|  | ||||
|         if not alert_template: | ||||
|             return None | ||||
|         else: | ||||
|             return { | ||||
|                 "name": alert_template.name, | ||||
|                 "always_email": alert_template.check_always_email, | ||||
|                 "always_text": alert_template.check_always_text, | ||||
|                 "always_alert": alert_template.check_always_alert, | ||||
|             } | ||||
|  | ||||
|     ## Change to return only array of tasks after 9/25/2020 | ||||
|     def get_assigned_task(self, obj): | ||||
| @@ -40,19 +58,35 @@ class CheckSerializer(serializers.ModelSerializer): | ||||
|             check_type = val["check_type"] | ||||
|         except KeyError: | ||||
|             return val | ||||
|  | ||||
|         # disk checks | ||||
|         # make sure no duplicate disk checks exist for an agent/policy | ||||
|         if check_type == "diskspace" and not self.instance:  # only on create | ||||
|             checks = ( | ||||
|                 Check.objects.filter(**self.context) | ||||
|                 .filter(check_type="diskspace") | ||||
|                 .exclude(managed_by_policy=True) | ||||
|             ) | ||||
|             for check in checks: | ||||
|                 if val["disk"] in check.disk: | ||||
|                     raise serializers.ValidationError( | ||||
|                         f"A disk check for Drive {val['disk']} already exists!" | ||||
|                     ) | ||||
|         if check_type == "diskspace": | ||||
|             if not self.instance:  # only on create | ||||
|                 checks = ( | ||||
|                     Check.objects.filter(**self.context) | ||||
|                     .filter(check_type="diskspace") | ||||
|                     .exclude(managed_by_policy=True) | ||||
|                 ) | ||||
|                 for check in checks: | ||||
|                     if val["disk"] in check.disk: | ||||
|                         raise serializers.ValidationError( | ||||
|                             f"A disk check for Drive {val['disk']} already exists!" | ||||
|                         ) | ||||
|  | ||||
|             if not val["warning_threshold"] and not val["error_threshold"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     "Warning threshold or Error Threshold must be set" | ||||
|                 ) | ||||
|  | ||||
|             if ( | ||||
|                 val["warning_threshold"] < val["error_threshold"] | ||||
|                 and val["warning_threshold"] > 0 | ||||
|                 and val["error_threshold"] > 0 | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     "Warning threshold must be greater than Error Threshold" | ||||
|                 ) | ||||
|  | ||||
|         # ping checks | ||||
|         if check_type == "ping": | ||||
| @@ -75,6 +109,20 @@ class CheckSerializer(serializers.ModelSerializer): | ||||
|                     "A cpuload check for this agent already exists" | ||||
|                 ) | ||||
|  | ||||
|             if not val["warning_threshold"] and not val["error_threshold"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     "Warning threshold or Error Threshold must be set" | ||||
|                 ) | ||||
|  | ||||
|             if ( | ||||
|                 val["warning_threshold"] > val["error_threshold"] | ||||
|                 and val["warning_threshold"] > 0 | ||||
|                 and val["error_threshold"] > 0 | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     "Warning threshold must be less than Error Threshold" | ||||
|                 ) | ||||
|  | ||||
|         if check_type == "memory" and not self.instance: | ||||
|             if ( | ||||
|                 Check.objects.filter(**self.context, check_type="memory") | ||||
| @@ -85,6 +133,20 @@ class CheckSerializer(serializers.ModelSerializer): | ||||
|                     "A memory check for this agent already exists" | ||||
|                 ) | ||||
|  | ||||
|             if not val["warning_threshold"] and not val["error_threshold"]: | ||||
|                 raise serializers.ValidationError( | ||||
|                     "Warning threshold or Error Threshold must be set" | ||||
|                 ) | ||||
|  | ||||
|             if ( | ||||
|                 val["warning_threshold"] > val["error_threshold"] | ||||
|                 and val["warning_threshold"] > 0 | ||||
|                 and val["error_threshold"] > 0 | ||||
|             ): | ||||
|                 raise serializers.ValidationError( | ||||
|                     "Warning threshold must be less than Error Threshold" | ||||
|                 ) | ||||
|  | ||||
|         return val | ||||
|  | ||||
|  | ||||
| @@ -118,8 +180,6 @@ class CheckRunnerGetSerializer(serializers.ModelSerializer): | ||||
|             "text_alert", | ||||
|             "fails_b4_alert", | ||||
|             "fail_count", | ||||
|             "email_sent", | ||||
|             "text_sent", | ||||
|             "outage_history", | ||||
|             "extra_details", | ||||
|             "stdout", | ||||
|   | ||||
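The validation added to CheckSerializer above applies the same two rules to every resource check: at least one of the thresholds must be set, and when both are set they must be ordered correctly for the metric (diskspace thresholds are percent free, so the warning level sits above the error level; cpuload and memory thresholds are percent used, so the warning level sits below the error level). A condensed restatement under those assumptions; validate_thresholds is illustrative and not part of the commit:

    from rest_framework import serializers


    def validate_thresholds(check_type: str, warning: int, error: int) -> None:
        # at least one threshold has to be configured
        if not warning and not error:
            raise serializers.ValidationError(
                "Warning threshold or Error Threshold must be set"
            )

        if warning > 0 and error > 0:
            if check_type == "diskspace" and warning < error:
                # % free: the warning level must be above the error level
                raise serializers.ValidationError(
                    "Warning threshold must be greater than Error Threshold"
                )
            if check_type in ("cpuload", "memory") and warning > error:
                # % used: the warning level must be below the error level
                raise serializers.ValidationError(
                    "Warning threshold must be less than Error Threshold"
                )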
| @@ -1,57 +1,91 @@ | ||||
| import datetime as dt | ||||
| import random | ||||
| from time import sleep | ||||
| from typing import Union | ||||
|  | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
| from tacticalrmm.celery import app | ||||
| from django.utils import timezone as djangotime | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_check_email_alert_task(pk): | ||||
|     from .models import Check | ||||
| def handle_check_email_alert_task(pk, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     check = Check.objects.get(pk=pk) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     if not check.agent.maintenance_mode: | ||||
|         # first time sending email | ||||
|         if not check.email_sent: | ||||
|             sleep(random.randint(1, 10)) | ||||
|             check.send_email() | ||||
|             check.email_sent = djangotime.now() | ||||
|             check.save(update_fields=["email_sent"]) | ||||
|         else: | ||||
|             # send an email only if the last email sent is older than 24 hours | ||||
|             delta = djangotime.now() - dt.timedelta(hours=24) | ||||
|             if check.email_sent < delta: | ||||
|     # first time sending email | ||||
|     if not alert.email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_check.send_email() | ||||
|         alert.email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["email_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send an email only if the last email sent is older than alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.email_sent < delta: | ||||
|                 sleep(random.randint(1, 10)) | ||||
|                 check.send_email() | ||||
|                 check.email_sent = djangotime.now() | ||||
|                 check.save(update_fields=["email_sent"]) | ||||
|                 alert.assigned_check.send_email() | ||||
|                 alert.email_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_check_sms_alert_task(pk): | ||||
|     from .models import Check | ||||
| def handle_check_sms_alert_task(pk, alert_interval: Union[float, None] = None) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     check = Check.objects.get(pk=pk) | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     if not check.agent.maintenance_mode: | ||||
|         # first time sending text | ||||
|         if not check.text_sent: | ||||
|             sleep(random.randint(1, 3)) | ||||
|             check.send_sms() | ||||
|             check.text_sent = djangotime.now() | ||||
|             check.save(update_fields=["text_sent"]) | ||||
|         else: | ||||
|     # first time sending text | ||||
|     if not alert.sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_check.send_sms() | ||||
|         alert.sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["sms_sent"]) | ||||
|     else: | ||||
|         if alert_interval: | ||||
|             # send a text only if the last text sent is older than the alert interval | ||||
|             delta = djangotime.now() - dt.timedelta(hours=24) | ||||
|             if check.text_sent < delta: | ||||
|             delta = djangotime.now() - dt.timedelta(days=alert_interval) | ||||
|             if alert.sms_sent < delta: | ||||
|                 sleep(random.randint(1, 3)) | ||||
|                 check.send_sms() | ||||
|                 check.text_sent = djangotime.now() | ||||
|                 check.save(update_fields=["text_sent"]) | ||||
|                 alert.assigned_check.send_sms() | ||||
|                 alert.sms_sent = djangotime.now() | ||||
|                 alert.save(update_fields=["sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_check_sms_alert_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending text | ||||
|     if not alert.resolved_sms_sent: | ||||
|         sleep(random.randint(1, 3)) | ||||
|         alert.assigned_check.send_resolved_sms() | ||||
|         alert.resolved_sms_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_sms_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|  | ||||
| @app.task | ||||
| def handle_resolved_check_email_alert_task(pk: int) -> str: | ||||
|     from alerts.models import Alert | ||||
|  | ||||
|     alert = Alert.objects.get(pk=pk) | ||||
|  | ||||
|     # first time sending email | ||||
|     if not alert.resolved_email_sent: | ||||
|         sleep(random.randint(1, 10)) | ||||
|         alert.assigned_check.send_resolved_email() | ||||
|         alert.resolved_email_sent = djangotime.now() | ||||
|         alert.save(update_fields=["resolved_email_sent"]) | ||||
|  | ||||
|     return "ok" | ||||
|  | ||||
|   | ||||
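The reworked tasks above now receive an Alert primary key rather than a Check primary key, plus an optional alert_interval in days that throttles repeat notifications; the resolved-alert tasks fire at most once. A hedged sketch of how a caller might queue them; the wrapper function names and the default interval are assumptions, not code from this diff:

    from checks.tasks import (
        handle_check_email_alert_task,
        handle_resolved_check_email_alert_task,
    )


    def queue_check_failure_email(alert, interval_days: float = 1.0) -> None:
        # pk is the Alert pk; a repeat email goes out at most once per interval_days
        handle_check_email_alert_task.delay(pk=alert.pk, alert_interval=interval_days)


    def queue_check_resolved_email(alert) -> None:
        # the resolved email is only ever sent once per alert
        handle_resolved_check_email_alert_task.delay(pk=alert.pk)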
| @@ -1,11 +1,13 @@ | ||||
| from checks.models import CheckHistory | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
| from .serializers import CheckSerializer | ||||
| from django.utils import timezone as djangotime | ||||
| from unittest.mock import patch | ||||
|  | ||||
| from django.utils import timezone as djangotime | ||||
| from model_bakery import baker | ||||
|  | ||||
| from checks.models import CheckHistory | ||||
| from tacticalrmm.test import TacticalTestCase | ||||
|  | ||||
| from .serializers import CheckSerializer | ||||
|  | ||||
|  | ||||
| class TestCheckViews(TacticalTestCase): | ||||
|     def setUp(self): | ||||
| @@ -22,8 +24,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|         serializer = CheckSerializer(disk_check) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|         self.check_not_authenticated("post", url) | ||||
|         self.assertEqual(resp.data, serializer.data)  # type: ignore | ||||
|         self.check_not_authenticated("get", url) | ||||
|  | ||||
|     def test_add_disk_check(self): | ||||
|         # setup data | ||||
| @@ -36,7 +38,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "threshold": 55, | ||||
|                 "error_threshold": 55, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
| @@ -50,7 +53,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "threshold": 55, | ||||
|                 "error_threshold": 55, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
| @@ -58,6 +62,38 @@ class TestCheckViews(TacticalTestCase): | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # this should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # this should fail because error threshold is greater than warning threshold | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "C:", | ||||
|                 "error_threshold": 50, | ||||
|                 "warning_threshold": 30, | ||||
|                 "fails_b4_alert": 3, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_add_cpuload_check(self): | ||||
|         url = "/checks/checks/" | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
| @@ -65,7 +101,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "cpuload", | ||||
|                 "threshold": 66, | ||||
|                 "error_threshold": 66, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
| @@ -73,7 +110,7 @@ class TestCheckViews(TacticalTestCase): | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         payload["threshold"] = 87 | ||||
|         payload["error_threshold"] = 87 | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|         self.assertEqual( | ||||
| @@ -81,6 +118,36 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "A cpuload check for this agent already exists", | ||||
|         ) | ||||
|  | ||||
|         # should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "cpuload", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # should fail because error is less than warning | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "cpuload", | ||||
|                 "error_threshold": 10, | ||||
|                 "warning_threshold": 50, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_add_memory_check(self): | ||||
|         url = "/checks/checks/" | ||||
|         agent = baker.make_recipe("agents.agent") | ||||
| @@ -88,7 +155,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "memory", | ||||
|                 "threshold": 78, | ||||
|                 "error_threshold": 78, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 1, | ||||
|             }, | ||||
|         } | ||||
| @@ -96,7 +164,7 @@ class TestCheckViews(TacticalTestCase): | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         payload["threshold"] = 55 | ||||
|         payload["error_threshold"] = 55 | ||||
|         resp = self.client.post(url, payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|         self.assertEqual( | ||||
| @@ -104,6 +172,34 @@ class TestCheckViews(TacticalTestCase): | ||||
|             "A memory check for this agent already exists", | ||||
|         ) | ||||
|  | ||||
|         # should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "memory", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # should fail because error is less than warning | ||||
|         invalid_payload = { | ||||
|             "pk": agent.pk, | ||||
|             "check": { | ||||
|                 "check_type": "memory", | ||||
|                 "error_threshold": 10, | ||||
|                 "warning_threshold": 50, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|     def test_get_policy_disk_check(self): | ||||
|         # setup data | ||||
|         policy = baker.make("automation.Policy") | ||||
| @@ -115,7 +211,7 @@ class TestCheckViews(TacticalTestCase): | ||||
|         serializer = CheckSerializer(disk_check) | ||||
|  | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(resp.data, serializer.data) | ||||
|         self.assertEqual(resp.data, serializer.data)  # type: ignore | ||||
|         self.check_not_authenticated("post", url) | ||||
|  | ||||
|     def test_add_policy_disk_check(self): | ||||
| @@ -125,25 +221,52 @@ class TestCheckViews(TacticalTestCase): | ||||
|         url = "/checks/checks/" | ||||
|  | ||||
|         valid_payload = { | ||||
|             "policy": policy.pk, | ||||
|             "policy": policy.pk,  # type: ignore | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "M:", | ||||
|                 "threshold": 86, | ||||
|                 "error_threshold": 86, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 2, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         # should fail because both error and warning thresholds are 0 | ||||
|         invalid_payload = { | ||||
|             "policy": policy.pk,  # type: ignore | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "error_threshold": 0, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, invalid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 400) | ||||
|  | ||||
|         # should fail because warning is less than error | ||||
|         invalid_payload = { | ||||
|             "policy": policy.pk,  # type: ignore | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "error_threshold": 80, | ||||
|                 "warning_threshold": 50, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.post(url, valid_payload, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         # this should fail because we already have a check for drive M: in setup | ||||
|         invalid_payload = { | ||||
|             "policy": policy.pk, | ||||
|             "policy": policy.pk,  # type: ignore | ||||
|             "check": { | ||||
|                 "check_type": "diskspace", | ||||
|                 "disk": "M:", | ||||
|                 "threshold": 34, | ||||
|                 "error_threshold": 34, | ||||
|                 "warning_threshold": 0, | ||||
|                 "fails_b4_alert": 9, | ||||
|             }, | ||||
|         } | ||||
| @@ -154,8 +277,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|     def test_get_disks_for_policies(self): | ||||
|         url = "/checks/getalldisks/" | ||||
|         r = self.client.get(url) | ||||
|         self.assertIsInstance(r.data, list) | ||||
|         self.assertEqual(26, len(r.data)) | ||||
|         self.assertIsInstance(r.data, list)  # type: ignore | ||||
|         self.assertEqual(26, len(r.data))  # type: ignore | ||||
|  | ||||
|     def test_edit_check_alert(self): | ||||
|         # setup data | ||||
| @@ -238,8 +361,8 @@ class TestCheckViews(TacticalTestCase): | ||||
|         ) | ||||
|  | ||||
|         # need to manually set the date back 35 days | ||||
|         for check_history in check_history_data: | ||||
|             check_history.x = djangotime.now() - djangotime.timedelta(days=35) | ||||
|         for check_history in check_history_data:  # type: ignore | ||||
|             check_history.x = djangotime.now() - djangotime.timedelta(days=35)  # type: ignore | ||||
|             check_history.save() | ||||
|  | ||||
|         # test invalid check pk | ||||
| @@ -252,20 +375,22 @@ class TestCheckViews(TacticalTestCase): | ||||
|         data = {"timeFilter": 30} | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(len(resp.data), 30) | ||||
|         self.assertEqual(len(resp.data), 30)  # type: ignore | ||||
|  | ||||
|         # test with timeFilter equal to 0 | ||||
|         data = {"timeFilter": 0} | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|         self.assertEqual(len(resp.data), 60) | ||||
|         self.assertEqual(len(resp.data), 60)  # type: ignore | ||||
|  | ||||
|         self.check_not_authenticated("patch", url) | ||||
|  | ||||
|  | ||||
| class TestCheckTasks(TacticalTestCase): | ||||
|     def setUp(self): | ||||
|         self.authenticate() | ||||
|         self.setup_coresettings() | ||||
|         self.agent = baker.make_recipe("agents.agent") | ||||
|  | ||||
|     def test_prune_check_history(self): | ||||
|         from .tasks import prune_check_history | ||||
| @@ -280,8 +405,8 @@ class TestCheckTasks(TacticalTestCase): | ||||
|         ) | ||||
|  | ||||
|         # need to manually set the date back 35 days | ||||
|         for check_history in check_history_data: | ||||
|             check_history.x = djangotime.now() - djangotime.timedelta(days=35) | ||||
|         for check_history in check_history_data:  # type: ignore | ||||
|             check_history.x = djangotime.now() - djangotime.timedelta(days=35)  # type: ignore | ||||
|             check_history.save() | ||||
|  | ||||
|         # prune data 30 days old | ||||
| @@ -291,3 +416,694 @@ class TestCheckTasks(TacticalTestCase): | ||||
|         # prune all Check history Data | ||||
|         prune_check_history(0) | ||||
|         self.assertEqual(CheckHistory.objects.count(), 0) | ||||
|  | ||||
|     def test_handle_script_check(self): | ||||
|         from checks.models import Check | ||||
|  | ||||
|         url = "/api/v3/checkrunner/" | ||||
|  | ||||
|         script = baker.make_recipe("checks.script_check", agent=self.agent) | ||||
|  | ||||
|         # test failing | ||||
|         data = { | ||||
|             "id": script.id, | ||||
|             "retcode": 500, | ||||
|             "stderr": "error", | ||||
|             "stdout": "message", | ||||
|             "runtime": 5.000, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=script.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test passing | ||||
|         data = { | ||||
|             "id": script.id, | ||||
|             "retcode": 0, | ||||
|             "stderr": "error", | ||||
|             "stdout": "message", | ||||
|             "runtime": 5.000, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=script.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|         # test failing info | ||||
|         script.info_return_codes = [20, 30, 50] | ||||
|         script.save() | ||||
|  | ||||
|         data = { | ||||
|             "id": script.id, | ||||
|             "retcode": 30, | ||||
|             "stderr": "error", | ||||
|             "stdout": "message", | ||||
|             "runtime": 5.000, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=script.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "info") | ||||
|  | ||||
|         # test failing warning | ||||
|         script.warning_return_codes = [80, 100, 1040] | ||||
|         script.save() | ||||
|  | ||||
|         data = { | ||||
|             "id": script.id, | ||||
|             "retcode": 1040, | ||||
|             "stderr": "error", | ||||
|             "stdout": "message", | ||||
|             "runtime": 5.000, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=script.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|     def test_handle_diskspace_check(self): | ||||
|         from checks.models import Check | ||||
|  | ||||
|         url = "/api/v3/checkrunner/" | ||||
|  | ||||
|         diskspace = baker.make_recipe( | ||||
|             "checks.diskspace_check", | ||||
|             warning_threshold=20, | ||||
|             error_threshold=10, | ||||
|             agent=self.agent, | ||||
|         ) | ||||
|  | ||||
|         # test warning threshold failure | ||||
|         data = { | ||||
|             "id": diskspace.id, | ||||
|             "exists": True, | ||||
|             "percent_used": 85, | ||||
|             "total": 500, | ||||
|             "free": 400, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=diskspace.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|         # test error failure | ||||
|         data = { | ||||
|             "id": diskspace.id, | ||||
|             "exists": True, | ||||
|             "percent_used": 95, | ||||
|             "total": 500, | ||||
|             "free": 400, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=diskspace.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test disk not exist | ||||
|         data = {"id": diskspace.id, "exists": False} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=diskspace.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test warning threshold 0 | ||||
|         diskspace.warning_threshold = 0 | ||||
|         diskspace.save() | ||||
|         data = { | ||||
|             "id": diskspace.id, | ||||
|             "exists": True, | ||||
|             "percent_used": 95, | ||||
|             "total": 500, | ||||
|             "free": 400, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=diskspace.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test error threshold 0 | ||||
|         diskspace.warning_threshold = 50 | ||||
|         diskspace.error_threshold = 0 | ||||
|         diskspace.save() | ||||
|         data = { | ||||
|             "id": diskspace.id, | ||||
|             "exists": True, | ||||
|             "percent_used": 95, | ||||
|             "total": 500, | ||||
|             "free": 400, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=diskspace.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|         # test passing | ||||
|         data = { | ||||
|             "id": diskspace.id, | ||||
|             "exists": True, | ||||
|             "percent_used": 50, | ||||
|             "total": 500, | ||||
|             "free": 400, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=diskspace.id) | ||||
|  | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|     def test_handle_cpuload_check(self): | ||||
|         from checks.models import Check | ||||
|  | ||||
|         url = "/api/v3/checkrunner/" | ||||
|  | ||||
|         cpuload = baker.make_recipe( | ||||
|             "checks.cpuload_check", | ||||
|             warning_threshold=70, | ||||
|             error_threshold=90, | ||||
|             agent=self.agent, | ||||
|         ) | ||||
|  | ||||
|         # test failing warning | ||||
|         data = {"id": cpuload.id, "percent": 80} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=cpuload.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|         # test failing error | ||||
|         data = {"id": cpuload.id, "percent": 95} | ||||
|  | ||||
|         # reset check history | ||||
|         cpuload.history = [] | ||||
|         cpuload.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=cpuload.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test passing | ||||
|         data = {"id": cpuload.id, "percent": 50} | ||||
|  | ||||
|         # reset check history | ||||
|         cpuload.history = [] | ||||
|         cpuload.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=cpuload.id) | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|         # test warning threshold 0 | ||||
|         cpuload.warning_threshold = 0 | ||||
|         cpuload.save() | ||||
|         data = {"id": cpuload.id, "percent": 95} | ||||
|  | ||||
|         # reset check history | ||||
|         cpuload.history = [] | ||||
|         cpuload.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=cpuload.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test error threshold 0 | ||||
|         cpuload.warning_threshold = 50 | ||||
|         cpuload.error_threshold = 0 | ||||
|         cpuload.save() | ||||
|         data = {"id": cpuload.id, "percent": 95} | ||||
|  | ||||
|         # reset check history | ||||
|         cpuload.history = [] | ||||
|         cpuload.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=cpuload.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|     def test_handle_memory_check(self): | ||||
|         from checks.models import Check | ||||
|  | ||||
|         url = "/api/v3/checkrunner/" | ||||
|  | ||||
|         memory = baker.make_recipe( | ||||
|             "checks.memory_check", | ||||
|             warning_threshold=70, | ||||
|             error_threshold=90, | ||||
|             agent=self.agent, | ||||
|         ) | ||||
|  | ||||
|         # test failing warning | ||||
|         data = {"id": memory.id, "percent": 80} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=memory.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|         # test failing error | ||||
|         data = {"id": memory.id, "percent": 95} | ||||
|  | ||||
|         # reset check history | ||||
|         memory.history = [] | ||||
|         memory.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=memory.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test passing | ||||
|         data = {"id": memory.id, "percent": 50} | ||||
|  | ||||
|         # reset check history | ||||
|         memory.history = [] | ||||
|         memory.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=memory.id) | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|         # test warning threshold 0 | ||||
|         memory.warning_threshold = 0 | ||||
|         memory.save() | ||||
|         data = {"id": memory.id, "percent": 95} | ||||
|  | ||||
|         # reset check history | ||||
|         memory.history = [] | ||||
|         memory.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=memory.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test error threshold 0 | ||||
|         memory.warning_threshold = 50 | ||||
|         memory.error_threshold = 0 | ||||
|         memory.save() | ||||
|         data = {"id": memory.id, "percent": 95} | ||||
|  | ||||
|         # reset check history | ||||
|         memory.history = [] | ||||
|         memory.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=memory.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|     def test_handle_ping_check(self): | ||||
|         from checks.models import Check | ||||
|  | ||||
|         url = "/api/v3/checkrunner/" | ||||
|  | ||||
|         ping = baker.make_recipe( | ||||
|             "checks.ping_check", agent=self.agent, alert_severity="info" | ||||
|         ) | ||||
|  | ||||
|         # test failing info | ||||
|         data = { | ||||
|             "id": ping.id, | ||||
|             "output": "Reply from 192.168.1.27: Destination host unreachable", | ||||
|             "has_stdout": True, | ||||
|             "has_stderr": False, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=ping.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "info") | ||||
|  | ||||
|         # test failing warning | ||||
|         data = { | ||||
|             "id": ping.id, | ||||
|             "output": "Reply from 192.168.1.27: Destination host unreachable", | ||||
|             "has_stdout": True, | ||||
|             "has_stderr": False, | ||||
|         } | ||||
|  | ||||
|         ping.alert_severity = "warning" | ||||
|         ping.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=ping.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|  | ||||
|         # test failing error | ||||
|         data = { | ||||
|             "id": ping.id, | ||||
|             "output": "Reply from 192.168.1.27: Destination host unreachable", | ||||
|             "has_stdout": True, | ||||
|             "has_stderr": False, | ||||
|         } | ||||
|  | ||||
|         ping.alert_severity = "error" | ||||
|         ping.save() | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=ping.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test failing error via stderr | ||||
|         data = { | ||||
|             "id": ping.id, | ||||
|             "output": "some output", | ||||
|             "has_stdout": False, | ||||
|             "has_stderr": True, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=ping.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|  | ||||
|         # test passing | ||||
|         data = { | ||||
|             "id": ping.id, | ||||
|             "output": "Reply from 192.168.1.1: bytes=32 time<1ms TTL=64", | ||||
|             "has_stdout": True, | ||||
|             "has_stderr": False, | ||||
|         } | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=ping.id) | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|     @patch("agents.models.Agent.nats_cmd") | ||||
|     def test_handle_winsvc_check(self, nats_cmd): | ||||
|         from checks.models import Check | ||||
|  | ||||
|         url = "/api/v3/checkrunner/" | ||||
|  | ||||
|         winsvc = baker.make_recipe( | ||||
|             "checks.winsvc_check", agent=self.agent, alert_severity="info" | ||||
|         ) | ||||
|  | ||||
|         # test passing running | ||||
|         data = {"id": winsvc.id, "exists": True, "status": "running"} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|         # test passing start pending | ||||
|         winsvc.pass_if_start_pending = True | ||||
|         winsvc.save() | ||||
|  | ||||
|         data = {"id": winsvc.id, "exists": True, "status": "start_pending"} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|         # test failing no start | ||||
|         data = {"id": winsvc.id, "exists": True, "status": "not running"} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "info") | ||||
|  | ||||
|         # test failing and attempt start | ||||
|         winsvc.restart_if_stopped = True | ||||
|         winsvc.alert_severity = "warning" | ||||
|         winsvc.save() | ||||
|  | ||||
|         nats_cmd.return_value = "timeout" | ||||
|  | ||||
|         data = {"id": winsvc.id, "exists": True, "status": "not running"} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "warning") | ||||
|         nats_cmd.assert_called() | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test failing and attempt start with error severity | ||||
|         winsvc.alert_severity = "error" | ||||
|         winsvc.save() | ||||
|         nats_cmd.return_value = {"success": False, "errormsg": "Some Error"} | ||||
|  | ||||
|         data = {"id": winsvc.id, "exists": True, "status": "not running"} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|         self.assertEqual(new_check.alert_severity, "error") | ||||
|         nats_cmd.assert_called() | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test success and attempt start | ||||
|         nats_cmd.return_value = {"success": True} | ||||
|  | ||||
|         data = {"id": winsvc.id, "exists": True, "status": "not running"} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|         nats_cmd.assert_called() | ||||
|         nats_cmd.reset_mock() | ||||
|  | ||||
|         # test failing and service not exist | ||||
|         data = {"id": winsvc.id, "exists": False, "status": ""} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "failing") | ||||
|  | ||||
|         # test success and service not exist | ||||
|         winsvc.pass_if_svc_not_exist = True | ||||
|         winsvc.save() | ||||
|         data = {"id": winsvc.id, "exists": False, "status": ""} | ||||
|  | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
|         new_check = Check.objects.get(pk=winsvc.id) | ||||
|         self.assertEqual(new_check.status, "passing") | ||||
|  | ||||
|     def test_handle_eventlog_check(self): | ||||
|         from checks.models import Check | ||||
|  | ||||
|         url = "/api/v3/checkrunner/" | ||||
|  | ||||
|         eventlog = baker.make_recipe( | ||||
|             "checks.eventlog_check", | ||||
|             event_type="warning", | ||||
|             fail_when="contains", | ||||
|             event_id=123, | ||||
|             alert_severity="warning", | ||||
|             agent=self.agent, | ||||
|         ) | ||||
|  | ||||
|         data = { | ||||
|             "id": eventlog.id, | ||||
|             "log": [ | ||||
|                 { | ||||
|                     "eventType": "warning", | ||||
|                     "eventID": 150, | ||||
|                     "source": "source", | ||||
|                     "message": "a test message", | ||||
|                 }, | ||||
|                 { | ||||
|                     "eventType": "warning", | ||||
|                     "eventID": 123, | ||||
|                     "source": "source", | ||||
|                     "message": "a test message", | ||||
|                 }, | ||||
|                 { | ||||
|                     "eventType": "error", | ||||
|                     "eventID": 123, | ||||
|                     "source": "source", | ||||
|                     "message": "a test message", | ||||
|                 }, | ||||
|             ], | ||||
|         } | ||||
|  | ||||
|         # test failing when contains | ||||
|         resp = self.client.patch(url, data, format="json") | ||||
|         self.assertEqual(resp.status_code, 200) | ||||
|  | ||||
        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.alert_severity, "warning")
        self.assertEquals(new_check.status, "failing")

        # test passing when not contains and message
        eventlog.event_message = "doesnt exist"
        eventlog.save()

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.status, "passing")

        # test failing when not contains and message and source
        eventlog.fail_when = "not_contains"
        eventlog.alert_severity = "error"
        eventlog.event_message = "doesnt exist"
        eventlog.event_source = "doesnt exist"
        eventlog.save()

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.status, "failing")
        self.assertEquals(new_check.alert_severity, "error")

        # test passing when contains with source and message
        eventlog.event_message = "test"
        eventlog.event_source = "source"
        eventlog.save()

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.status, "passing")

        # test failing with wildcard not contains and source
        eventlog.event_id_is_wildcard = True
        eventlog.event_source = "doesn't exist"
        eventlog.event_message = ""
        eventlog.event_id = 0
        eventlog.save()

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.status, "failing")
        self.assertEquals(new_check.alert_severity, "error")

        # test passing with wildcard contains
        eventlog.event_source = ""
        eventlog.event_message = ""
        eventlog.save()

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.status, "passing")

        # test failing with wildcard contains and message
        eventlog.fail_when = "contains"
        eventlog.event_type = "error"
        eventlog.alert_severity = "info"
        eventlog.event_message = "test"
        eventlog.event_source = ""
        eventlog.save()

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.status, "failing")
        self.assertEquals(new_check.alert_severity, "info")

        # test passing with wildcard not contains message and source
        eventlog.event_message = "doesnt exist"
        eventlog.event_source = "doesnt exist"
        eventlog.save()

        resp = self.client.patch(url, data, format="json")
        self.assertEqual(resp.status_code, 200)

        new_check = Check.objects.get(pk=eventlog.id)

        self.assertEquals(new_check.status, "passing")

@@ -1,4 +1,5 @@
from django.urls import path

from . import views

urlpatterns = [

@@ -1,31 +1,26 @@
import asyncio
from packaging import version as pyver

from django.shortcuts import get_object_or_404
from django.db.models import Q
from django.utils import timezone as djangotime

from datetime import datetime as dt

from rest_framework.views import APIView
from rest_framework.response import Response
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils import timezone as djangotime
from packaging import version as pyver
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.views import APIView

from tacticalrmm.utils import notify_error
from agents.models import Agent
from automation.models import Policy

from .models import Check
from scripts.models import Script

from .serializers import CheckSerializer, CheckHistorySerializer


from automation.tasks import (
    generate_agent_checks_from_policies_task,
    delete_policy_check_task,
    generate_agent_checks_from_policies_task,
    update_policy_check_fields_task,
)
from scripts.models import Script
from tacticalrmm.utils import notify_error

from .models import Check
from .serializers import CheckHistorySerializer, CheckSerializer

class AddCheck(APIView):
@@ -64,7 +59,7 @@ class AddCheck(APIView):
        if policy:
            generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
        elif agent:
            checks = agent.agentchecks.filter(
            checks = agent.agentchecks.filter(  # type: ignore
                check_type=obj.check_type, managed_by_policy=True
            )

@@ -154,7 +149,7 @@ class CheckHistory(APIView):
                    - djangotime.timedelta(days=request.data["timeFilter"]),
                )

        check_history = check.check_history.filter(timeFilter).order_by("-x")
        check_history = check.check_history.filter(timeFilter).order_by("-x")  # type: ignore

        return Response(
            CheckHistorySerializer(

@@ -1,6 +1,6 @@
from django.contrib import admin

from .models import Client, Site, Deployment
from .models import Client, Deployment, Site

admin.site.register(Client)
admin.site.register(Site)

@@ -1,7 +1,7 @@
# Generated by Django 3.0.6 on 2020-05-31 01:23

from django.db import migrations, models
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

Some files were not shown because too many files have changed in this diff.